001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: HAServiceProtocol.proto
003
004package org.apache.hadoop.ha.proto;
005
006public final class HAServiceProtocolProtos {
007  private HAServiceProtocolProtos() {}
008  public static void registerAllExtensions(
009      com.google.protobuf.ExtensionRegistry registry) {
010  }
011  public enum HAServiceStateProto
012      implements com.google.protobuf.ProtocolMessageEnum {
013    INITIALIZING(0, 0),
014    ACTIVE(1, 1),
015    STANDBY(2, 2),
016    ;
017    
018    public static final int INITIALIZING_VALUE = 0;
019    public static final int ACTIVE_VALUE = 1;
020    public static final int STANDBY_VALUE = 2;
021    
022    
023    public final int getNumber() { return value; }
024    
025    public static HAServiceStateProto valueOf(int value) {
026      switch (value) {
027        case 0: return INITIALIZING;
028        case 1: return ACTIVE;
029        case 2: return STANDBY;
030        default: return null;
031      }
032    }
033    
034    public static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
035        internalGetValueMap() {
036      return internalValueMap;
037    }
038    private static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
039        internalValueMap =
040          new com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>() {
041            public HAServiceStateProto findValueByNumber(int number) {
042              return HAServiceStateProto.valueOf(number);
043            }
044          };
045    
046    public final com.google.protobuf.Descriptors.EnumValueDescriptor
047        getValueDescriptor() {
048      return getDescriptor().getValues().get(index);
049    }
050    public final com.google.protobuf.Descriptors.EnumDescriptor
051        getDescriptorForType() {
052      return getDescriptor();
053    }
054    public static final com.google.protobuf.Descriptors.EnumDescriptor
055        getDescriptor() {
056      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(0);
057    }
058    
059    private static final HAServiceStateProto[] VALUES = {
060      INITIALIZING, ACTIVE, STANDBY, 
061    };
062    
063    public static HAServiceStateProto valueOf(
064        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
065      if (desc.getType() != getDescriptor()) {
066        throw new java.lang.IllegalArgumentException(
067          "EnumValueDescriptor is not for this type.");
068      }
069      return VALUES[desc.getIndex()];
070    }
071    
072    private final int index;
073    private final int value;
074    
075    private HAServiceStateProto(int index, int value) {
076      this.index = index;
077      this.value = value;
078    }
079    
080    // @@protoc_insertion_point(enum_scope:hadoop.common.HAServiceStateProto)
081  }
082  
083  public enum HARequestSource
084      implements com.google.protobuf.ProtocolMessageEnum {
085    REQUEST_BY_USER(0, 0),
086    REQUEST_BY_USER_FORCED(1, 1),
087    REQUEST_BY_ZKFC(2, 2),
088    ;
089    
090    public static final int REQUEST_BY_USER_VALUE = 0;
091    public static final int REQUEST_BY_USER_FORCED_VALUE = 1;
092    public static final int REQUEST_BY_ZKFC_VALUE = 2;
093    
094    
095    public final int getNumber() { return value; }
096    
097    public static HARequestSource valueOf(int value) {
098      switch (value) {
099        case 0: return REQUEST_BY_USER;
100        case 1: return REQUEST_BY_USER_FORCED;
101        case 2: return REQUEST_BY_ZKFC;
102        default: return null;
103      }
104    }
105    
106    public static com.google.protobuf.Internal.EnumLiteMap<HARequestSource>
107        internalGetValueMap() {
108      return internalValueMap;
109    }
110    private static com.google.protobuf.Internal.EnumLiteMap<HARequestSource>
111        internalValueMap =
112          new com.google.protobuf.Internal.EnumLiteMap<HARequestSource>() {
113            public HARequestSource findValueByNumber(int number) {
114              return HARequestSource.valueOf(number);
115            }
116          };
117    
118    public final com.google.protobuf.Descriptors.EnumValueDescriptor
119        getValueDescriptor() {
120      return getDescriptor().getValues().get(index);
121    }
122    public final com.google.protobuf.Descriptors.EnumDescriptor
123        getDescriptorForType() {
124      return getDescriptor();
125    }
126    public static final com.google.protobuf.Descriptors.EnumDescriptor
127        getDescriptor() {
128      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(1);
129    }
130    
131    private static final HARequestSource[] VALUES = {
132      REQUEST_BY_USER, REQUEST_BY_USER_FORCED, REQUEST_BY_ZKFC, 
133    };
134    
135    public static HARequestSource valueOf(
136        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
137      if (desc.getType() != getDescriptor()) {
138        throw new java.lang.IllegalArgumentException(
139          "EnumValueDescriptor is not for this type.");
140      }
141      return VALUES[desc.getIndex()];
142    }
143    
144    private final int index;
145    private final int value;
146    
147    private HARequestSource(int index, int value) {
148      this.index = index;
149      this.value = value;
150    }
151    
152    // @@protoc_insertion_point(enum_scope:hadoop.common.HARequestSource)
153  }
154  
155  public interface HAStateChangeRequestInfoProtoOrBuilder
156      extends com.google.protobuf.MessageOrBuilder {
157    
158    // required .hadoop.common.HARequestSource reqSource = 1;
159    boolean hasReqSource();
160    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource();
161  }
162  public static final class HAStateChangeRequestInfoProto extends
163      com.google.protobuf.GeneratedMessage
164      implements HAStateChangeRequestInfoProtoOrBuilder {
165    // Use HAStateChangeRequestInfoProto.newBuilder() to construct.
166    private HAStateChangeRequestInfoProto(Builder builder) {
167      super(builder);
168    }
169    private HAStateChangeRequestInfoProto(boolean noInit) {}
170    
171    private static final HAStateChangeRequestInfoProto defaultInstance;
172    public static HAStateChangeRequestInfoProto getDefaultInstance() {
173      return defaultInstance;
174    }
175    
176    public HAStateChangeRequestInfoProto getDefaultInstanceForType() {
177      return defaultInstance;
178    }
179    
180    public static final com.google.protobuf.Descriptors.Descriptor
181        getDescriptor() {
182      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
183    }
184    
185    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
186        internalGetFieldAccessorTable() {
187      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable;
188    }
189    
190    private int bitField0_;
191    // required .hadoop.common.HARequestSource reqSource = 1;
192    public static final int REQSOURCE_FIELD_NUMBER = 1;
193    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource reqSource_;
194    public boolean hasReqSource() {
195      return ((bitField0_ & 0x00000001) == 0x00000001);
196    }
197    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource() {
198      return reqSource_;
199    }
200    
201    private void initFields() {
202      reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
203    }
204    private byte memoizedIsInitialized = -1;
205    public final boolean isInitialized() {
206      byte isInitialized = memoizedIsInitialized;
207      if (isInitialized != -1) return isInitialized == 1;
208      
209      if (!hasReqSource()) {
210        memoizedIsInitialized = 0;
211        return false;
212      }
213      memoizedIsInitialized = 1;
214      return true;
215    }
216    
217    public void writeTo(com.google.protobuf.CodedOutputStream output)
218                        throws java.io.IOException {
219      getSerializedSize();
220      if (((bitField0_ & 0x00000001) == 0x00000001)) {
221        output.writeEnum(1, reqSource_.getNumber());
222      }
223      getUnknownFields().writeTo(output);
224    }
225    
226    private int memoizedSerializedSize = -1;
227    public int getSerializedSize() {
228      int size = memoizedSerializedSize;
229      if (size != -1) return size;
230    
231      size = 0;
232      if (((bitField0_ & 0x00000001) == 0x00000001)) {
233        size += com.google.protobuf.CodedOutputStream
234          .computeEnumSize(1, reqSource_.getNumber());
235      }
236      size += getUnknownFields().getSerializedSize();
237      memoizedSerializedSize = size;
238      return size;
239    }
240    
241    private static final long serialVersionUID = 0L;
242    @java.lang.Override
243    protected java.lang.Object writeReplace()
244        throws java.io.ObjectStreamException {
245      return super.writeReplace();
246    }
247    
248    @java.lang.Override
249    public boolean equals(final java.lang.Object obj) {
250      if (obj == this) {
251       return true;
252      }
253      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto)) {
254        return super.equals(obj);
255      }
256      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) obj;
257      
258      boolean result = true;
259      result = result && (hasReqSource() == other.hasReqSource());
260      if (hasReqSource()) {
261        result = result &&
262            (getReqSource() == other.getReqSource());
263      }
264      result = result &&
265          getUnknownFields().equals(other.getUnknownFields());
266      return result;
267    }
268    
269    @java.lang.Override
270    public int hashCode() {
271      int hash = 41;
272      hash = (19 * hash) + getDescriptorForType().hashCode();
273      if (hasReqSource()) {
274        hash = (37 * hash) + REQSOURCE_FIELD_NUMBER;
275        hash = (53 * hash) + hashEnum(getReqSource());
276      }
277      hash = (29 * hash) + getUnknownFields().hashCode();
278      return hash;
279    }
280    
281    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
282        com.google.protobuf.ByteString data)
283        throws com.google.protobuf.InvalidProtocolBufferException {
284      return newBuilder().mergeFrom(data).buildParsed();
285    }
286    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
287        com.google.protobuf.ByteString data,
288        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
289        throws com.google.protobuf.InvalidProtocolBufferException {
290      return newBuilder().mergeFrom(data, extensionRegistry)
291               .buildParsed();
292    }
293    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(byte[] data)
294        throws com.google.protobuf.InvalidProtocolBufferException {
295      return newBuilder().mergeFrom(data).buildParsed();
296    }
297    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
298        byte[] data,
299        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
300        throws com.google.protobuf.InvalidProtocolBufferException {
301      return newBuilder().mergeFrom(data, extensionRegistry)
302               .buildParsed();
303    }
304    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(java.io.InputStream input)
305        throws java.io.IOException {
306      return newBuilder().mergeFrom(input).buildParsed();
307    }
308    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
309        java.io.InputStream input,
310        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
311        throws java.io.IOException {
312      return newBuilder().mergeFrom(input, extensionRegistry)
313               .buildParsed();
314    }
315    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseDelimitedFrom(java.io.InputStream input)
316        throws java.io.IOException {
317      Builder builder = newBuilder();
318      if (builder.mergeDelimitedFrom(input)) {
319        return builder.buildParsed();
320      } else {
321        return null;
322      }
323    }
324    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseDelimitedFrom(
325        java.io.InputStream input,
326        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
327        throws java.io.IOException {
328      Builder builder = newBuilder();
329      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
330        return builder.buildParsed();
331      } else {
332        return null;
333      }
334    }
335    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
336        com.google.protobuf.CodedInputStream input)
337        throws java.io.IOException {
338      return newBuilder().mergeFrom(input).buildParsed();
339    }
340    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
341        com.google.protobuf.CodedInputStream input,
342        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
343        throws java.io.IOException {
344      return newBuilder().mergeFrom(input, extensionRegistry)
345               .buildParsed();
346    }
347    
348    public static Builder newBuilder() { return Builder.create(); }
349    public Builder newBuilderForType() { return newBuilder(); }
350    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto prototype) {
351      return newBuilder().mergeFrom(prototype);
352    }
353    public Builder toBuilder() { return newBuilder(this); }
354    
355    @java.lang.Override
356    protected Builder newBuilderForType(
357        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
358      Builder builder = new Builder(parent);
359      return builder;
360    }
361    public static final class Builder extends
362        com.google.protobuf.GeneratedMessage.Builder<Builder>
363       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder {
364      public static final com.google.protobuf.Descriptors.Descriptor
365          getDescriptor() {
366        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
367      }
368      
369      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
370          internalGetFieldAccessorTable() {
371        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable;
372      }
373      
374      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder()
375      private Builder() {
376        maybeForceBuilderInitialization();
377      }
378      
379      private Builder(BuilderParent parent) {
380        super(parent);
381        maybeForceBuilderInitialization();
382      }
383      private void maybeForceBuilderInitialization() {
384        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
385        }
386      }
387      private static Builder create() {
388        return new Builder();
389      }
390      
391      public Builder clear() {
392        super.clear();
393        reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
394        bitField0_ = (bitField0_ & ~0x00000001);
395        return this;
396      }
397      
398      public Builder clone() {
399        return create().mergeFrom(buildPartial());
400      }
401      
402      public com.google.protobuf.Descriptors.Descriptor
403          getDescriptorForType() {
404        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDescriptor();
405      }
406      
407      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getDefaultInstanceForType() {
408        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
409      }
410      
411      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto build() {
412        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = buildPartial();
413        if (!result.isInitialized()) {
414          throw newUninitializedMessageException(result);
415        }
416        return result;
417      }
418      
419      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto buildParsed()
420          throws com.google.protobuf.InvalidProtocolBufferException {
421        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = buildPartial();
422        if (!result.isInitialized()) {
423          throw newUninitializedMessageException(
424            result).asInvalidProtocolBufferException();
425        }
426        return result;
427      }
428      
429      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto buildPartial() {
430        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto(this);
431        int from_bitField0_ = bitField0_;
432        int to_bitField0_ = 0;
433        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
434          to_bitField0_ |= 0x00000001;
435        }
436        result.reqSource_ = reqSource_;
437        result.bitField0_ = to_bitField0_;
438        onBuilt();
439        return result;
440      }
441      
442      public Builder mergeFrom(com.google.protobuf.Message other) {
443        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) {
444          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto)other);
445        } else {
446          super.mergeFrom(other);
447          return this;
448        }
449      }
450      
451      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto other) {
452        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) return this;
453        if (other.hasReqSource()) {
454          setReqSource(other.getReqSource());
455        }
456        this.mergeUnknownFields(other.getUnknownFields());
457        return this;
458      }
459      
460      public final boolean isInitialized() {
461        if (!hasReqSource()) {
462          
463          return false;
464        }
465        return true;
466      }
467      
468      public Builder mergeFrom(
469          com.google.protobuf.CodedInputStream input,
470          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
471          throws java.io.IOException {
472        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
473          com.google.protobuf.UnknownFieldSet.newBuilder(
474            this.getUnknownFields());
475        while (true) {
476          int tag = input.readTag();
477          switch (tag) {
478            case 0:
479              this.setUnknownFields(unknownFields.build());
480              onChanged();
481              return this;
482            default: {
483              if (!parseUnknownField(input, unknownFields,
484                                     extensionRegistry, tag)) {
485                this.setUnknownFields(unknownFields.build());
486                onChanged();
487                return this;
488              }
489              break;
490            }
491            case 8: {
492              int rawValue = input.readEnum();
493              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.valueOf(rawValue);
494              if (value == null) {
495                unknownFields.mergeVarintField(1, rawValue);
496              } else {
497                bitField0_ |= 0x00000001;
498                reqSource_ = value;
499              }
500              break;
501            }
502          }
503        }
504      }
505      
506      private int bitField0_;
507      
508      // required .hadoop.common.HARequestSource reqSource = 1;
509      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
510      public boolean hasReqSource() {
511        return ((bitField0_ & 0x00000001) == 0x00000001);
512      }
513      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource() {
514        return reqSource_;
515      }
516      public Builder setReqSource(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource value) {
517        if (value == null) {
518          throw new NullPointerException();
519        }
520        bitField0_ |= 0x00000001;
521        reqSource_ = value;
522        onChanged();
523        return this;
524      }
525      public Builder clearReqSource() {
526        bitField0_ = (bitField0_ & ~0x00000001);
527        reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
528        onChanged();
529        return this;
530      }
531      
532      // @@protoc_insertion_point(builder_scope:hadoop.common.HAStateChangeRequestInfoProto)
533    }
534    
535    static {
536      defaultInstance = new HAStateChangeRequestInfoProto(true);
537      defaultInstance.initFields();
538    }
539    
540    // @@protoc_insertion_point(class_scope:hadoop.common.HAStateChangeRequestInfoProto)
541  }
542  
543  public interface MonitorHealthRequestProtoOrBuilder
544      extends com.google.protobuf.MessageOrBuilder {
545  }
546  public static final class MonitorHealthRequestProto extends
547      com.google.protobuf.GeneratedMessage
548      implements MonitorHealthRequestProtoOrBuilder {
549    // Use MonitorHealthRequestProto.newBuilder() to construct.
550    private MonitorHealthRequestProto(Builder builder) {
551      super(builder);
552    }
553    private MonitorHealthRequestProto(boolean noInit) {}
554    
555    private static final MonitorHealthRequestProto defaultInstance;
556    public static MonitorHealthRequestProto getDefaultInstance() {
557      return defaultInstance;
558    }
559    
560    public MonitorHealthRequestProto getDefaultInstanceForType() {
561      return defaultInstance;
562    }
563    
564    public static final com.google.protobuf.Descriptors.Descriptor
565        getDescriptor() {
566      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
567    }
568    
569    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
570        internalGetFieldAccessorTable() {
571      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable;
572    }
573    
574    private void initFields() {
575    }
576    private byte memoizedIsInitialized = -1;
577    public final boolean isInitialized() {
578      byte isInitialized = memoizedIsInitialized;
579      if (isInitialized != -1) return isInitialized == 1;
580      
581      memoizedIsInitialized = 1;
582      return true;
583    }
584    
585    public void writeTo(com.google.protobuf.CodedOutputStream output)
586                        throws java.io.IOException {
587      getSerializedSize();
588      getUnknownFields().writeTo(output);
589    }
590    
591    private int memoizedSerializedSize = -1;
592    public int getSerializedSize() {
593      int size = memoizedSerializedSize;
594      if (size != -1) return size;
595    
596      size = 0;
597      size += getUnknownFields().getSerializedSize();
598      memoizedSerializedSize = size;
599      return size;
600    }
601    
602    private static final long serialVersionUID = 0L;
603    @java.lang.Override
604    protected java.lang.Object writeReplace()
605        throws java.io.ObjectStreamException {
606      return super.writeReplace();
607    }
608    
609    @java.lang.Override
610    public boolean equals(final java.lang.Object obj) {
611      if (obj == this) {
612       return true;
613      }
614      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)) {
615        return super.equals(obj);
616      }
617      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) obj;
618      
619      boolean result = true;
620      result = result &&
621          getUnknownFields().equals(other.getUnknownFields());
622      return result;
623    }
624    
625    @java.lang.Override
626    public int hashCode() {
627      int hash = 41;
628      hash = (19 * hash) + getDescriptorForType().hashCode();
629      hash = (29 * hash) + getUnknownFields().hashCode();
630      return hash;
631    }
632    
633    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
634        com.google.protobuf.ByteString data)
635        throws com.google.protobuf.InvalidProtocolBufferException {
636      return newBuilder().mergeFrom(data).buildParsed();
637    }
638    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
639        com.google.protobuf.ByteString data,
640        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
641        throws com.google.protobuf.InvalidProtocolBufferException {
642      return newBuilder().mergeFrom(data, extensionRegistry)
643               .buildParsed();
644    }
645    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(byte[] data)
646        throws com.google.protobuf.InvalidProtocolBufferException {
647      return newBuilder().mergeFrom(data).buildParsed();
648    }
649    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
650        byte[] data,
651        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
652        throws com.google.protobuf.InvalidProtocolBufferException {
653      return newBuilder().mergeFrom(data, extensionRegistry)
654               .buildParsed();
655    }
656    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(java.io.InputStream input)
657        throws java.io.IOException {
658      return newBuilder().mergeFrom(input).buildParsed();
659    }
660    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
661        java.io.InputStream input,
662        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
663        throws java.io.IOException {
664      return newBuilder().mergeFrom(input, extensionRegistry)
665               .buildParsed();
666    }
667    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(java.io.InputStream input)
668        throws java.io.IOException {
669      Builder builder = newBuilder();
670      if (builder.mergeDelimitedFrom(input)) {
671        return builder.buildParsed();
672      } else {
673        return null;
674      }
675    }
676    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(
677        java.io.InputStream input,
678        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
679        throws java.io.IOException {
680      Builder builder = newBuilder();
681      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
682        return builder.buildParsed();
683      } else {
684        return null;
685      }
686    }
687    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
688        com.google.protobuf.CodedInputStream input)
689        throws java.io.IOException {
690      return newBuilder().mergeFrom(input).buildParsed();
691    }
692    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
693        com.google.protobuf.CodedInputStream input,
694        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
695        throws java.io.IOException {
696      return newBuilder().mergeFrom(input, extensionRegistry)
697               .buildParsed();
698    }
699    
700    public static Builder newBuilder() { return Builder.create(); }
701    public Builder newBuilderForType() { return newBuilder(); }
702    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto prototype) {
703      return newBuilder().mergeFrom(prototype);
704    }
705    public Builder toBuilder() { return newBuilder(this); }
706    
707    @java.lang.Override
708    protected Builder newBuilderForType(
709        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
710      Builder builder = new Builder(parent);
711      return builder;
712    }
713    public static final class Builder extends
714        com.google.protobuf.GeneratedMessage.Builder<Builder>
715       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProtoOrBuilder {
716      public static final com.google.protobuf.Descriptors.Descriptor
717          getDescriptor() {
718        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
719      }
720      
721      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
722          internalGetFieldAccessorTable() {
723        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable;
724      }
725      
726      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.newBuilder()
727      private Builder() {
728        maybeForceBuilderInitialization();
729      }
730      
731      private Builder(BuilderParent parent) {
732        super(parent);
733        maybeForceBuilderInitialization();
734      }
735      private void maybeForceBuilderInitialization() {
736        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
737        }
738      }
739      private static Builder create() {
740        return new Builder();
741      }
742      
743      public Builder clear() {
744        super.clear();
745        return this;
746      }
747      
748      public Builder clone() {
749        return create().mergeFrom(buildPartial());
750      }
751      
752      public com.google.protobuf.Descriptors.Descriptor
753          getDescriptorForType() {
754        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDescriptor();
755      }
756      
757      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto getDefaultInstanceForType() {
758        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
759      }
760      
761      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto build() {
762        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
763        if (!result.isInitialized()) {
764          throw newUninitializedMessageException(result);
765        }
766        return result;
767      }
768      
769      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildParsed()
770          throws com.google.protobuf.InvalidProtocolBufferException {
771        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
772        if (!result.isInitialized()) {
773          throw newUninitializedMessageException(
774            result).asInvalidProtocolBufferException();
775        }
776        return result;
777      }
778      
779      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildPartial() {
780        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto(this);
781        onBuilt();
782        return result;
783      }
784      
785      public Builder mergeFrom(com.google.protobuf.Message other) {
786        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) {
787          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)other);
788        } else {
789          super.mergeFrom(other);
790          return this;
791        }
792      }
793      
794      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other) {
795        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance()) return this;
796        this.mergeUnknownFields(other.getUnknownFields());
797        return this;
798      }
799      
800      public final boolean isInitialized() {
801        return true;
802      }
803      
804      public Builder mergeFrom(
805          com.google.protobuf.CodedInputStream input,
806          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
807          throws java.io.IOException {
808        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
809          com.google.protobuf.UnknownFieldSet.newBuilder(
810            this.getUnknownFields());
811        while (true) {
812          int tag = input.readTag();
813          switch (tag) {
814            case 0:
815              this.setUnknownFields(unknownFields.build());
816              onChanged();
817              return this;
818            default: {
819              if (!parseUnknownField(input, unknownFields,
820                                     extensionRegistry, tag)) {
821                this.setUnknownFields(unknownFields.build());
822                onChanged();
823                return this;
824              }
825              break;
826            }
827          }
828        }
829      }
830      
831      
832      // @@protoc_insertion_point(builder_scope:hadoop.common.MonitorHealthRequestProto)
833    }
834    
835    static {
836      defaultInstance = new MonitorHealthRequestProto(true);
837      defaultInstance.initFields();
838    }
839    
840    // @@protoc_insertion_point(class_scope:hadoop.common.MonitorHealthRequestProto)
841  }
842  
843  public interface MonitorHealthResponseProtoOrBuilder
844      extends com.google.protobuf.MessageOrBuilder {
845  }
846  public static final class MonitorHealthResponseProto extends
847      com.google.protobuf.GeneratedMessage
848      implements MonitorHealthResponseProtoOrBuilder {
849    // Use MonitorHealthResponseProto.newBuilder() to construct.
850    private MonitorHealthResponseProto(Builder builder) {
851      super(builder);
852    }
853    private MonitorHealthResponseProto(boolean noInit) {}
854    
855    private static final MonitorHealthResponseProto defaultInstance;
856    public static MonitorHealthResponseProto getDefaultInstance() {
857      return defaultInstance;
858    }
859    
860    public MonitorHealthResponseProto getDefaultInstanceForType() {
861      return defaultInstance;
862    }
863    
864    public static final com.google.protobuf.Descriptors.Descriptor
865        getDescriptor() {
866      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
867    }
868    
869    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
870        internalGetFieldAccessorTable() {
871      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable;
872    }
873    
874    private void initFields() {
875    }
876    private byte memoizedIsInitialized = -1;
877    public final boolean isInitialized() {
878      byte isInitialized = memoizedIsInitialized;
879      if (isInitialized != -1) return isInitialized == 1;
880      
881      memoizedIsInitialized = 1;
882      return true;
883    }
884    
885    public void writeTo(com.google.protobuf.CodedOutputStream output)
886                        throws java.io.IOException {
887      getSerializedSize();
888      getUnknownFields().writeTo(output);
889    }
890    
891    private int memoizedSerializedSize = -1;
892    public int getSerializedSize() {
893      int size = memoizedSerializedSize;
894      if (size != -1) return size;
895    
896      size = 0;
897      size += getUnknownFields().getSerializedSize();
898      memoizedSerializedSize = size;
899      return size;
900    }
901    
902    private static final long serialVersionUID = 0L;
903    @java.lang.Override
904    protected java.lang.Object writeReplace()
905        throws java.io.ObjectStreamException {
906      return super.writeReplace();
907    }
908    
909    @java.lang.Override
910    public boolean equals(final java.lang.Object obj) {
911      if (obj == this) {
912       return true;
913      }
914      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)) {
915        return super.equals(obj);
916      }
917      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) obj;
918      
919      boolean result = true;
920      result = result &&
921          getUnknownFields().equals(other.getUnknownFields());
922      return result;
923    }
924    
925    @java.lang.Override
926    public int hashCode() {
927      int hash = 41;
928      hash = (19 * hash) + getDescriptorForType().hashCode();
929      hash = (29 * hash) + getUnknownFields().hashCode();
930      return hash;
931    }
932    
933    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
934        com.google.protobuf.ByteString data)
935        throws com.google.protobuf.InvalidProtocolBufferException {
936      return newBuilder().mergeFrom(data).buildParsed();
937    }
938    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
939        com.google.protobuf.ByteString data,
940        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
941        throws com.google.protobuf.InvalidProtocolBufferException {
942      return newBuilder().mergeFrom(data, extensionRegistry)
943               .buildParsed();
944    }
945    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(byte[] data)
946        throws com.google.protobuf.InvalidProtocolBufferException {
947      return newBuilder().mergeFrom(data).buildParsed();
948    }
949    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
950        byte[] data,
951        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
952        throws com.google.protobuf.InvalidProtocolBufferException {
953      return newBuilder().mergeFrom(data, extensionRegistry)
954               .buildParsed();
955    }
956    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(java.io.InputStream input)
957        throws java.io.IOException {
958      return newBuilder().mergeFrom(input).buildParsed();
959    }
960    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
961        java.io.InputStream input,
962        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
963        throws java.io.IOException {
964      return newBuilder().mergeFrom(input, extensionRegistry)
965               .buildParsed();
966    }
967    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(java.io.InputStream input)
968        throws java.io.IOException {
969      Builder builder = newBuilder();
970      if (builder.mergeDelimitedFrom(input)) {
971        return builder.buildParsed();
972      } else {
973        return null;
974      }
975    }
976    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(
977        java.io.InputStream input,
978        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
979        throws java.io.IOException {
980      Builder builder = newBuilder();
981      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
982        return builder.buildParsed();
983      } else {
984        return null;
985      }
986    }
987    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
988        com.google.protobuf.CodedInputStream input)
989        throws java.io.IOException {
990      return newBuilder().mergeFrom(input).buildParsed();
991    }
992    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
993        com.google.protobuf.CodedInputStream input,
994        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
995        throws java.io.IOException {
996      return newBuilder().mergeFrom(input, extensionRegistry)
997               .buildParsed();
998    }
999    
1000    public static Builder newBuilder() { return Builder.create(); }
1001    public Builder newBuilderForType() { return newBuilder(); }
1002    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto prototype) {
1003      return newBuilder().mergeFrom(prototype);
1004    }
1005    public Builder toBuilder() { return newBuilder(this); }
1006    
1007    @java.lang.Override
1008    protected Builder newBuilderForType(
1009        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1010      Builder builder = new Builder(parent);
1011      return builder;
1012    }
1013    public static final class Builder extends
1014        com.google.protobuf.GeneratedMessage.Builder<Builder>
1015       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProtoOrBuilder {
1016      public static final com.google.protobuf.Descriptors.Descriptor
1017          getDescriptor() {
1018        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
1019      }
1020      
1021      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1022          internalGetFieldAccessorTable() {
1023        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable;
1024      }
1025      
1026      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.newBuilder()
1027      private Builder() {
1028        maybeForceBuilderInitialization();
1029      }
1030      
1031      private Builder(BuilderParent parent) {
1032        super(parent);
1033        maybeForceBuilderInitialization();
1034      }
1035      private void maybeForceBuilderInitialization() {
1036        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1037        }
1038      }
1039      private static Builder create() {
1040        return new Builder();
1041      }
1042      
1043      public Builder clear() {
1044        super.clear();
1045        return this;
1046      }
1047      
1048      public Builder clone() {
1049        return create().mergeFrom(buildPartial());
1050      }
1051      
1052      public com.google.protobuf.Descriptors.Descriptor
1053          getDescriptorForType() {
1054        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDescriptor();
1055      }
1056      
1057      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto getDefaultInstanceForType() {
1058        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
1059      }
1060      
1061      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto build() {
1062        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
1063        if (!result.isInitialized()) {
1064          throw newUninitializedMessageException(result);
1065        }
1066        return result;
1067      }
1068      
1069      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildParsed()
1070          throws com.google.protobuf.InvalidProtocolBufferException {
1071        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
1072        if (!result.isInitialized()) {
1073          throw newUninitializedMessageException(
1074            result).asInvalidProtocolBufferException();
1075        }
1076        return result;
1077      }
1078      
1079      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildPartial() {
1080        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto(this);
1081        onBuilt();
1082        return result;
1083      }
1084      
1085      public Builder mergeFrom(com.google.protobuf.Message other) {
1086        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) {
1087          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)other);
1088        } else {
1089          super.mergeFrom(other);
1090          return this;
1091        }
1092      }
1093      
1094      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other) {
1095        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()) return this;
1096        this.mergeUnknownFields(other.getUnknownFields());
1097        return this;
1098      }
1099      
1100      public final boolean isInitialized() {
1101        return true;
1102      }
1103      
1104      public Builder mergeFrom(
1105          com.google.protobuf.CodedInputStream input,
1106          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1107          throws java.io.IOException {
1108        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1109          com.google.protobuf.UnknownFieldSet.newBuilder(
1110            this.getUnknownFields());
1111        while (true) {
1112          int tag = input.readTag();
1113          switch (tag) {
1114            case 0:
1115              this.setUnknownFields(unknownFields.build());
1116              onChanged();
1117              return this;
1118            default: {
1119              if (!parseUnknownField(input, unknownFields,
1120                                     extensionRegistry, tag)) {
1121                this.setUnknownFields(unknownFields.build());
1122                onChanged();
1123                return this;
1124              }
1125              break;
1126            }
1127          }
1128        }
1129      }
1130      
1131      
1132      // @@protoc_insertion_point(builder_scope:hadoop.common.MonitorHealthResponseProto)
1133    }
1134    
1135    static {
1136      defaultInstance = new MonitorHealthResponseProto(true);
1137      defaultInstance.initFields();
1138    }
1139    
1140    // @@protoc_insertion_point(class_scope:hadoop.common.MonitorHealthResponseProto)
1141  }
1142  
1143  public interface TransitionToActiveRequestProtoOrBuilder
1144      extends com.google.protobuf.MessageOrBuilder {
1145    
1146    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
1147    boolean hasReqInfo();
1148    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo();
1149    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder();
1150  }
1151  public static final class TransitionToActiveRequestProto extends
1152      com.google.protobuf.GeneratedMessage
1153      implements TransitionToActiveRequestProtoOrBuilder {
1154    // Use TransitionToActiveRequestProto.newBuilder() to construct.
1155    private TransitionToActiveRequestProto(Builder builder) {
1156      super(builder);
1157    }
1158    private TransitionToActiveRequestProto(boolean noInit) {}
1159    
1160    private static final TransitionToActiveRequestProto defaultInstance;
1161    public static TransitionToActiveRequestProto getDefaultInstance() {
1162      return defaultInstance;
1163    }
1164    
1165    public TransitionToActiveRequestProto getDefaultInstanceForType() {
1166      return defaultInstance;
1167    }
1168    
1169    public static final com.google.protobuf.Descriptors.Descriptor
1170        getDescriptor() {
1171      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
1172    }
1173    
1174    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1175        internalGetFieldAccessorTable() {
1176      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable;
1177    }
1178    
1179    private int bitField0_;
1180    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
1181    public static final int REQINFO_FIELD_NUMBER = 1;
1182    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_;
1183    public boolean hasReqInfo() {
1184      return ((bitField0_ & 0x00000001) == 0x00000001);
1185    }
1186    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
1187      return reqInfo_;
1188    }
1189    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
1190      return reqInfo_;
1191    }
1192    
1193    private void initFields() {
1194      reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1195    }
1196    private byte memoizedIsInitialized = -1;
1197    public final boolean isInitialized() {
1198      byte isInitialized = memoizedIsInitialized;
1199      if (isInitialized != -1) return isInitialized == 1;
1200      
1201      if (!hasReqInfo()) {
1202        memoizedIsInitialized = 0;
1203        return false;
1204      }
1205      if (!getReqInfo().isInitialized()) {
1206        memoizedIsInitialized = 0;
1207        return false;
1208      }
1209      memoizedIsInitialized = 1;
1210      return true;
1211    }
1212    
1213    public void writeTo(com.google.protobuf.CodedOutputStream output)
1214                        throws java.io.IOException {
1215      getSerializedSize();
1216      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1217        output.writeMessage(1, reqInfo_);
1218      }
1219      getUnknownFields().writeTo(output);
1220    }
1221    
1222    private int memoizedSerializedSize = -1;
1223    public int getSerializedSize() {
1224      int size = memoizedSerializedSize;
1225      if (size != -1) return size;
1226    
1227      size = 0;
1228      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1229        size += com.google.protobuf.CodedOutputStream
1230          .computeMessageSize(1, reqInfo_);
1231      }
1232      size += getUnknownFields().getSerializedSize();
1233      memoizedSerializedSize = size;
1234      return size;
1235    }
1236    
1237    private static final long serialVersionUID = 0L;
1238    @java.lang.Override
1239    protected java.lang.Object writeReplace()
1240        throws java.io.ObjectStreamException {
1241      return super.writeReplace();
1242    }
1243    
1244    @java.lang.Override
1245    public boolean equals(final java.lang.Object obj) {
1246      if (obj == this) {
1247       return true;
1248      }
1249      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)) {
1250        return super.equals(obj);
1251      }
1252      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) obj;
1253      
1254      boolean result = true;
1255      result = result && (hasReqInfo() == other.hasReqInfo());
1256      if (hasReqInfo()) {
1257        result = result && getReqInfo()
1258            .equals(other.getReqInfo());
1259      }
1260      result = result &&
1261          getUnknownFields().equals(other.getUnknownFields());
1262      return result;
1263    }
1264    
1265    @java.lang.Override
1266    public int hashCode() {
1267      int hash = 41;
1268      hash = (19 * hash) + getDescriptorForType().hashCode();
1269      if (hasReqInfo()) {
1270        hash = (37 * hash) + REQINFO_FIELD_NUMBER;
1271        hash = (53 * hash) + getReqInfo().hashCode();
1272      }
1273      hash = (29 * hash) + getUnknownFields().hashCode();
1274      return hash;
1275    }
1276    
1277    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
1278        com.google.protobuf.ByteString data)
1279        throws com.google.protobuf.InvalidProtocolBufferException {
1280      return newBuilder().mergeFrom(data).buildParsed();
1281    }
1282    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
1283        com.google.protobuf.ByteString data,
1284        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1285        throws com.google.protobuf.InvalidProtocolBufferException {
1286      return newBuilder().mergeFrom(data, extensionRegistry)
1287               .buildParsed();
1288    }
1289    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(byte[] data)
1290        throws com.google.protobuf.InvalidProtocolBufferException {
1291      return newBuilder().mergeFrom(data).buildParsed();
1292    }
1293    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
1294        byte[] data,
1295        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1296        throws com.google.protobuf.InvalidProtocolBufferException {
1297      return newBuilder().mergeFrom(data, extensionRegistry)
1298               .buildParsed();
1299    }
1300    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(java.io.InputStream input)
1301        throws java.io.IOException {
1302      return newBuilder().mergeFrom(input).buildParsed();
1303    }
1304    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
1305        java.io.InputStream input,
1306        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1307        throws java.io.IOException {
1308      return newBuilder().mergeFrom(input, extensionRegistry)
1309               .buildParsed();
1310    }
1311    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(java.io.InputStream input)
1312        throws java.io.IOException {
1313      Builder builder = newBuilder();
1314      if (builder.mergeDelimitedFrom(input)) {
1315        return builder.buildParsed();
1316      } else {
1317        return null;
1318      }
1319    }
1320    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(
1321        java.io.InputStream input,
1322        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1323        throws java.io.IOException {
1324      Builder builder = newBuilder();
1325      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
1326        return builder.buildParsed();
1327      } else {
1328        return null;
1329      }
1330    }
1331    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
1332        com.google.protobuf.CodedInputStream input)
1333        throws java.io.IOException {
1334      return newBuilder().mergeFrom(input).buildParsed();
1335    }
1336    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
1337        com.google.protobuf.CodedInputStream input,
1338        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1339        throws java.io.IOException {
1340      return newBuilder().mergeFrom(input, extensionRegistry)
1341               .buildParsed();
1342    }
1343    
1344    public static Builder newBuilder() { return Builder.create(); }
1345    public Builder newBuilderForType() { return newBuilder(); }
1346    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto prototype) {
1347      return newBuilder().mergeFrom(prototype);
1348    }
1349    public Builder toBuilder() { return newBuilder(this); }
1350    
1351    @java.lang.Override
1352    protected Builder newBuilderForType(
1353        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1354      Builder builder = new Builder(parent);
1355      return builder;
1356    }
1357    public static final class Builder extends
1358        com.google.protobuf.GeneratedMessage.Builder<Builder>
1359       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProtoOrBuilder {
1360      public static final com.google.protobuf.Descriptors.Descriptor
1361          getDescriptor() {
1362        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
1363      }
1364      
1365      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1366          internalGetFieldAccessorTable() {
1367        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable;
1368      }
1369      
1370      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.newBuilder()
1371      private Builder() {
1372        maybeForceBuilderInitialization();
1373      }
1374      
1375      private Builder(BuilderParent parent) {
1376        super(parent);
1377        maybeForceBuilderInitialization();
1378      }
1379      private void maybeForceBuilderInitialization() {
1380        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1381          getReqInfoFieldBuilder();
1382        }
1383      }
1384      private static Builder create() {
1385        return new Builder();
1386      }
1387      
1388      public Builder clear() {
1389        super.clear();
1390        if (reqInfoBuilder_ == null) {
1391          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1392        } else {
1393          reqInfoBuilder_.clear();
1394        }
1395        bitField0_ = (bitField0_ & ~0x00000001);
1396        return this;
1397      }
1398      
1399      public Builder clone() {
1400        return create().mergeFrom(buildPartial());
1401      }
1402      
1403      public com.google.protobuf.Descriptors.Descriptor
1404          getDescriptorForType() {
1405        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDescriptor();
1406      }
1407      
1408      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto getDefaultInstanceForType() {
1409        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
1410      }
1411      
1412      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto build() {
1413        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
1414        if (!result.isInitialized()) {
1415          throw newUninitializedMessageException(result);
1416        }
1417        return result;
1418      }
1419      
1420      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildParsed()
1421          throws com.google.protobuf.InvalidProtocolBufferException {
1422        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
1423        if (!result.isInitialized()) {
1424          throw newUninitializedMessageException(
1425            result).asInvalidProtocolBufferException();
1426        }
1427        return result;
1428      }
1429      
1430      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildPartial() {
1431        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto(this);
1432        int from_bitField0_ = bitField0_;
1433        int to_bitField0_ = 0;
1434        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1435          to_bitField0_ |= 0x00000001;
1436        }
1437        if (reqInfoBuilder_ == null) {
1438          result.reqInfo_ = reqInfo_;
1439        } else {
1440          result.reqInfo_ = reqInfoBuilder_.build();
1441        }
1442        result.bitField0_ = to_bitField0_;
1443        onBuilt();
1444        return result;
1445      }
1446      
1447      public Builder mergeFrom(com.google.protobuf.Message other) {
1448        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) {
1449          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)other);
1450        } else {
1451          super.mergeFrom(other);
1452          return this;
1453        }
1454      }
1455      
1456      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other) {
1457        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance()) return this;
1458        if (other.hasReqInfo()) {
1459          mergeReqInfo(other.getReqInfo());
1460        }
1461        this.mergeUnknownFields(other.getUnknownFields());
1462        return this;
1463      }
1464      
1465      public final boolean isInitialized() {
1466        if (!hasReqInfo()) {
1467          
1468          return false;
1469        }
1470        if (!getReqInfo().isInitialized()) {
1471          
1472          return false;
1473        }
1474        return true;
1475      }
1476      
1477      public Builder mergeFrom(
1478          com.google.protobuf.CodedInputStream input,
1479          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1480          throws java.io.IOException {
1481        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1482          com.google.protobuf.UnknownFieldSet.newBuilder(
1483            this.getUnknownFields());
1484        while (true) {
1485          int tag = input.readTag();
1486          switch (tag) {
1487            case 0:
1488              this.setUnknownFields(unknownFields.build());
1489              onChanged();
1490              return this;
1491            default: {
1492              if (!parseUnknownField(input, unknownFields,
1493                                     extensionRegistry, tag)) {
1494                this.setUnknownFields(unknownFields.build());
1495                onChanged();
1496                return this;
1497              }
1498              break;
1499            }
1500            case 10: {
1501              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder subBuilder = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder();
1502              if (hasReqInfo()) {
1503                subBuilder.mergeFrom(getReqInfo());
1504              }
1505              input.readMessage(subBuilder, extensionRegistry);
1506              setReqInfo(subBuilder.buildPartial());
1507              break;
1508            }
1509          }
1510        }
1511      }
1512      
1513      private int bitField0_;
1514      
1515      // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
1516      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1517      private com.google.protobuf.SingleFieldBuilder<
1518          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> reqInfoBuilder_;
1519      public boolean hasReqInfo() {
1520        return ((bitField0_ & 0x00000001) == 0x00000001);
1521      }
1522      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
1523        if (reqInfoBuilder_ == null) {
1524          return reqInfo_;
1525        } else {
1526          return reqInfoBuilder_.getMessage();
1527        }
1528      }
1529      public Builder setReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
1530        if (reqInfoBuilder_ == null) {
1531          if (value == null) {
1532            throw new NullPointerException();
1533          }
1534          reqInfo_ = value;
1535          onChanged();
1536        } else {
1537          reqInfoBuilder_.setMessage(value);
1538        }
1539        bitField0_ |= 0x00000001;
1540        return this;
1541      }
1542      public Builder setReqInfo(
1543          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder builderForValue) {
1544        if (reqInfoBuilder_ == null) {
1545          reqInfo_ = builderForValue.build();
1546          onChanged();
1547        } else {
1548          reqInfoBuilder_.setMessage(builderForValue.build());
1549        }
1550        bitField0_ |= 0x00000001;
1551        return this;
1552      }
1553      public Builder mergeReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
1554        if (reqInfoBuilder_ == null) {
1555          if (((bitField0_ & 0x00000001) == 0x00000001) &&
1556              reqInfo_ != org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) {
1557            reqInfo_ =
1558              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder(reqInfo_).mergeFrom(value).buildPartial();
1559          } else {
1560            reqInfo_ = value;
1561          }
1562          onChanged();
1563        } else {
1564          reqInfoBuilder_.mergeFrom(value);
1565        }
1566        bitField0_ |= 0x00000001;
1567        return this;
1568      }
1569      public Builder clearReqInfo() {
1570        if (reqInfoBuilder_ == null) {
1571          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1572          onChanged();
1573        } else {
1574          reqInfoBuilder_.clear();
1575        }
1576        bitField0_ = (bitField0_ & ~0x00000001);
1577        return this;
1578      }
1579      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder getReqInfoBuilder() {
1580        bitField0_ |= 0x00000001;
1581        onChanged();
1582        return getReqInfoFieldBuilder().getBuilder();
1583      }
1584      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
1585        if (reqInfoBuilder_ != null) {
1586          return reqInfoBuilder_.getMessageOrBuilder();
1587        } else {
1588          return reqInfo_;
1589        }
1590      }
1591      private com.google.protobuf.SingleFieldBuilder<
1592          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> 
1593          getReqInfoFieldBuilder() {
1594        if (reqInfoBuilder_ == null) {
1595          reqInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
1596              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder>(
1597                  reqInfo_,
1598                  getParentForChildren(),
1599                  isClean());
1600          reqInfo_ = null;
1601        }
1602        return reqInfoBuilder_;
1603      }
1604      
1605      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToActiveRequestProto)
1606    }
1607    
1608    static {
1609      defaultInstance = new TransitionToActiveRequestProto(true);
1610      defaultInstance.initFields();
1611    }
1612    
1613    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToActiveRequestProto)
1614  }
1615  
1616  public interface TransitionToActiveResponseProtoOrBuilder
1617      extends com.google.protobuf.MessageOrBuilder {
1618  }
1619  public static final class TransitionToActiveResponseProto extends
1620      com.google.protobuf.GeneratedMessage
1621      implements TransitionToActiveResponseProtoOrBuilder {
1622    // Use TransitionToActiveResponseProto.newBuilder() to construct.
1623    private TransitionToActiveResponseProto(Builder builder) {
1624      super(builder);
1625    }
1626    private TransitionToActiveResponseProto(boolean noInit) {}
1627    
1628    private static final TransitionToActiveResponseProto defaultInstance;
1629    public static TransitionToActiveResponseProto getDefaultInstance() {
1630      return defaultInstance;
1631    }
1632    
1633    public TransitionToActiveResponseProto getDefaultInstanceForType() {
1634      return defaultInstance;
1635    }
1636    
1637    public static final com.google.protobuf.Descriptors.Descriptor
1638        getDescriptor() {
1639      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
1640    }
1641    
1642    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1643        internalGetFieldAccessorTable() {
1644      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable;
1645    }
1646    
1647    private void initFields() {
1648    }
1649    private byte memoizedIsInitialized = -1;
1650    public final boolean isInitialized() {
1651      byte isInitialized = memoizedIsInitialized;
1652      if (isInitialized != -1) return isInitialized == 1;
1653      
1654      memoizedIsInitialized = 1;
1655      return true;
1656    }
1657    
1658    public void writeTo(com.google.protobuf.CodedOutputStream output)
1659                        throws java.io.IOException {
1660      getSerializedSize();
1661      getUnknownFields().writeTo(output);
1662    }
1663    
1664    private int memoizedSerializedSize = -1;
1665    public int getSerializedSize() {
1666      int size = memoizedSerializedSize;
1667      if (size != -1) return size;
1668    
1669      size = 0;
1670      size += getUnknownFields().getSerializedSize();
1671      memoizedSerializedSize = size;
1672      return size;
1673    }
1674    
1675    private static final long serialVersionUID = 0L;
1676    @java.lang.Override
1677    protected java.lang.Object writeReplace()
1678        throws java.io.ObjectStreamException {
1679      return super.writeReplace();
1680    }
1681    
1682    @java.lang.Override
1683    public boolean equals(final java.lang.Object obj) {
1684      if (obj == this) {
1685       return true;
1686      }
1687      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)) {
1688        return super.equals(obj);
1689      }
1690      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) obj;
1691      
1692      boolean result = true;
1693      result = result &&
1694          getUnknownFields().equals(other.getUnknownFields());
1695      return result;
1696    }
1697    
1698    @java.lang.Override
1699    public int hashCode() {
1700      int hash = 41;
1701      hash = (19 * hash) + getDescriptorForType().hashCode();
1702      hash = (29 * hash) + getUnknownFields().hashCode();
1703      return hash;
1704    }
1705    
1706    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
1707        com.google.protobuf.ByteString data)
1708        throws com.google.protobuf.InvalidProtocolBufferException {
1709      return newBuilder().mergeFrom(data).buildParsed();
1710    }
1711    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
1712        com.google.protobuf.ByteString data,
1713        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1714        throws com.google.protobuf.InvalidProtocolBufferException {
1715      return newBuilder().mergeFrom(data, extensionRegistry)
1716               .buildParsed();
1717    }
1718    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(byte[] data)
1719        throws com.google.protobuf.InvalidProtocolBufferException {
1720      return newBuilder().mergeFrom(data).buildParsed();
1721    }
1722    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
1723        byte[] data,
1724        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1725        throws com.google.protobuf.InvalidProtocolBufferException {
1726      return newBuilder().mergeFrom(data, extensionRegistry)
1727               .buildParsed();
1728    }
1729    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(java.io.InputStream input)
1730        throws java.io.IOException {
1731      return newBuilder().mergeFrom(input).buildParsed();
1732    }
1733    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
1734        java.io.InputStream input,
1735        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1736        throws java.io.IOException {
1737      return newBuilder().mergeFrom(input, extensionRegistry)
1738               .buildParsed();
1739    }
1740    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(java.io.InputStream input)
1741        throws java.io.IOException {
1742      Builder builder = newBuilder();
1743      if (builder.mergeDelimitedFrom(input)) {
1744        return builder.buildParsed();
1745      } else {
1746        return null;
1747      }
1748    }
1749    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(
1750        java.io.InputStream input,
1751        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1752        throws java.io.IOException {
1753      Builder builder = newBuilder();
1754      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
1755        return builder.buildParsed();
1756      } else {
1757        return null;
1758      }
1759    }
1760    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
1761        com.google.protobuf.CodedInputStream input)
1762        throws java.io.IOException {
1763      return newBuilder().mergeFrom(input).buildParsed();
1764    }
1765    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
1766        com.google.protobuf.CodedInputStream input,
1767        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1768        throws java.io.IOException {
1769      return newBuilder().mergeFrom(input, extensionRegistry)
1770               .buildParsed();
1771    }
1772    
1773    public static Builder newBuilder() { return Builder.create(); }
1774    public Builder newBuilderForType() { return newBuilder(); }
1775    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto prototype) {
1776      return newBuilder().mergeFrom(prototype);
1777    }
1778    public Builder toBuilder() { return newBuilder(this); }
1779    
1780    @java.lang.Override
1781    protected Builder newBuilderForType(
1782        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1783      Builder builder = new Builder(parent);
1784      return builder;
1785    }
1786    public static final class Builder extends
1787        com.google.protobuf.GeneratedMessage.Builder<Builder>
1788       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProtoOrBuilder {
1789      public static final com.google.protobuf.Descriptors.Descriptor
1790          getDescriptor() {
1791        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
1792      }
1793      
1794      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1795          internalGetFieldAccessorTable() {
1796        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable;
1797      }
1798      
1799      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.newBuilder()
1800      private Builder() {
1801        maybeForceBuilderInitialization();
1802      }
1803      
1804      private Builder(BuilderParent parent) {
1805        super(parent);
1806        maybeForceBuilderInitialization();
1807      }
1808      private void maybeForceBuilderInitialization() {
1809        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1810        }
1811      }
1812      private static Builder create() {
1813        return new Builder();
1814      }
1815      
1816      public Builder clear() {
1817        super.clear();
1818        return this;
1819      }
1820      
1821      public Builder clone() {
1822        return create().mergeFrom(buildPartial());
1823      }
1824      
1825      public com.google.protobuf.Descriptors.Descriptor
1826          getDescriptorForType() {
1827        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDescriptor();
1828      }
1829      
1830      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto getDefaultInstanceForType() {
1831        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
1832      }
1833      
1834      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto build() {
1835        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
1836        if (!result.isInitialized()) {
1837          throw newUninitializedMessageException(result);
1838        }
1839        return result;
1840      }
1841      
1842      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildParsed()
1843          throws com.google.protobuf.InvalidProtocolBufferException {
1844        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
1845        if (!result.isInitialized()) {
1846          throw newUninitializedMessageException(
1847            result).asInvalidProtocolBufferException();
1848        }
1849        return result;
1850      }
1851      
1852      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildPartial() {
1853        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto(this);
1854        onBuilt();
1855        return result;
1856      }
1857      
1858      public Builder mergeFrom(com.google.protobuf.Message other) {
1859        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) {
1860          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)other);
1861        } else {
1862          super.mergeFrom(other);
1863          return this;
1864        }
1865      }
1866      
1867      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other) {
1868        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()) return this;
1869        this.mergeUnknownFields(other.getUnknownFields());
1870        return this;
1871      }
1872      
1873      public final boolean isInitialized() {
1874        return true;
1875      }
1876      
1877      public Builder mergeFrom(
1878          com.google.protobuf.CodedInputStream input,
1879          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1880          throws java.io.IOException {
1881        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1882          com.google.protobuf.UnknownFieldSet.newBuilder(
1883            this.getUnknownFields());
1884        while (true) {
1885          int tag = input.readTag();
1886          switch (tag) {
1887            case 0:
1888              this.setUnknownFields(unknownFields.build());
1889              onChanged();
1890              return this;
1891            default: {
1892              if (!parseUnknownField(input, unknownFields,
1893                                     extensionRegistry, tag)) {
1894                this.setUnknownFields(unknownFields.build());
1895                onChanged();
1896                return this;
1897              }
1898              break;
1899            }
1900          }
1901        }
1902      }
1903      
1904      
1905      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToActiveResponseProto)
1906    }
1907    
1908    static {
1909      defaultInstance = new TransitionToActiveResponseProto(true);
1910      defaultInstance.initFields();
1911    }
1912    
1913    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToActiveResponseProto)
1914  }
1915  
1916  public interface TransitionToStandbyRequestProtoOrBuilder
1917      extends com.google.protobuf.MessageOrBuilder {
1918    
1919    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
1920    boolean hasReqInfo();
1921    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo();
1922    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder();
1923  }
1924  public static final class TransitionToStandbyRequestProto extends
1925      com.google.protobuf.GeneratedMessage
1926      implements TransitionToStandbyRequestProtoOrBuilder {
1927    // Use TransitionToStandbyRequestProto.newBuilder() to construct.
1928    private TransitionToStandbyRequestProto(Builder builder) {
1929      super(builder);
1930    }
1931    private TransitionToStandbyRequestProto(boolean noInit) {}
1932    
1933    private static final TransitionToStandbyRequestProto defaultInstance;
1934    public static TransitionToStandbyRequestProto getDefaultInstance() {
1935      return defaultInstance;
1936    }
1937    
1938    public TransitionToStandbyRequestProto getDefaultInstanceForType() {
1939      return defaultInstance;
1940    }
1941    
1942    public static final com.google.protobuf.Descriptors.Descriptor
1943        getDescriptor() {
1944      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
1945    }
1946    
1947    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1948        internalGetFieldAccessorTable() {
1949      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable;
1950    }
1951    
1952    private int bitField0_;
1953    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
1954    public static final int REQINFO_FIELD_NUMBER = 1;
1955    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_;
1956    public boolean hasReqInfo() {
1957      return ((bitField0_ & 0x00000001) == 0x00000001);
1958    }
1959    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
1960      return reqInfo_;
1961    }
1962    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
1963      return reqInfo_;
1964    }
1965    
1966    private void initFields() {
1967      reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1968    }
1969    private byte memoizedIsInitialized = -1;
1970    public final boolean isInitialized() {
1971      byte isInitialized = memoizedIsInitialized;
1972      if (isInitialized != -1) return isInitialized == 1;
1973      
1974      if (!hasReqInfo()) {
1975        memoizedIsInitialized = 0;
1976        return false;
1977      }
1978      if (!getReqInfo().isInitialized()) {
1979        memoizedIsInitialized = 0;
1980        return false;
1981      }
1982      memoizedIsInitialized = 1;
1983      return true;
1984    }
1985    
1986    public void writeTo(com.google.protobuf.CodedOutputStream output)
1987                        throws java.io.IOException {
1988      getSerializedSize();
1989      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1990        output.writeMessage(1, reqInfo_);
1991      }
1992      getUnknownFields().writeTo(output);
1993    }
1994    
1995    private int memoizedSerializedSize = -1;
1996    public int getSerializedSize() {
1997      int size = memoizedSerializedSize;
1998      if (size != -1) return size;
1999    
2000      size = 0;
2001      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2002        size += com.google.protobuf.CodedOutputStream
2003          .computeMessageSize(1, reqInfo_);
2004      }
2005      size += getUnknownFields().getSerializedSize();
2006      memoizedSerializedSize = size;
2007      return size;
2008    }
2009    
2010    private static final long serialVersionUID = 0L;
2011    @java.lang.Override
2012    protected java.lang.Object writeReplace()
2013        throws java.io.ObjectStreamException {
2014      return super.writeReplace();
2015    }
2016    
2017    @java.lang.Override
2018    public boolean equals(final java.lang.Object obj) {
2019      if (obj == this) {
2020       return true;
2021      }
2022      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)) {
2023        return super.equals(obj);
2024      }
2025      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) obj;
2026      
2027      boolean result = true;
2028      result = result && (hasReqInfo() == other.hasReqInfo());
2029      if (hasReqInfo()) {
2030        result = result && getReqInfo()
2031            .equals(other.getReqInfo());
2032      }
2033      result = result &&
2034          getUnknownFields().equals(other.getUnknownFields());
2035      return result;
2036    }
2037    
2038    @java.lang.Override
2039    public int hashCode() {
2040      int hash = 41;
2041      hash = (19 * hash) + getDescriptorForType().hashCode();
2042      if (hasReqInfo()) {
2043        hash = (37 * hash) + REQINFO_FIELD_NUMBER;
2044        hash = (53 * hash) + getReqInfo().hashCode();
2045      }
2046      hash = (29 * hash) + getUnknownFields().hashCode();
2047      return hash;
2048    }
2049    
2050    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
2051        com.google.protobuf.ByteString data)
2052        throws com.google.protobuf.InvalidProtocolBufferException {
2053      return newBuilder().mergeFrom(data).buildParsed();
2054    }
2055    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
2056        com.google.protobuf.ByteString data,
2057        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2058        throws com.google.protobuf.InvalidProtocolBufferException {
2059      return newBuilder().mergeFrom(data, extensionRegistry)
2060               .buildParsed();
2061    }
2062    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(byte[] data)
2063        throws com.google.protobuf.InvalidProtocolBufferException {
2064      return newBuilder().mergeFrom(data).buildParsed();
2065    }
2066    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
2067        byte[] data,
2068        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2069        throws com.google.protobuf.InvalidProtocolBufferException {
2070      return newBuilder().mergeFrom(data, extensionRegistry)
2071               .buildParsed();
2072    }
2073    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(java.io.InputStream input)
2074        throws java.io.IOException {
2075      return newBuilder().mergeFrom(input).buildParsed();
2076    }
2077    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
2078        java.io.InputStream input,
2079        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2080        throws java.io.IOException {
2081      return newBuilder().mergeFrom(input, extensionRegistry)
2082               .buildParsed();
2083    }
2084    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(java.io.InputStream input)
2085        throws java.io.IOException {
2086      Builder builder = newBuilder();
2087      if (builder.mergeDelimitedFrom(input)) {
2088        return builder.buildParsed();
2089      } else {
2090        return null;
2091      }
2092    }
2093    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(
2094        java.io.InputStream input,
2095        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2096        throws java.io.IOException {
2097      Builder builder = newBuilder();
2098      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
2099        return builder.buildParsed();
2100      } else {
2101        return null;
2102      }
2103    }
2104    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
2105        com.google.protobuf.CodedInputStream input)
2106        throws java.io.IOException {
2107      return newBuilder().mergeFrom(input).buildParsed();
2108    }
2109    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
2110        com.google.protobuf.CodedInputStream input,
2111        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2112        throws java.io.IOException {
2113      return newBuilder().mergeFrom(input, extensionRegistry)
2114               .buildParsed();
2115    }
2116    
2117    public static Builder newBuilder() { return Builder.create(); }
2118    public Builder newBuilderForType() { return newBuilder(); }
2119    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto prototype) {
2120      return newBuilder().mergeFrom(prototype);
2121    }
2122    public Builder toBuilder() { return newBuilder(this); }
2123    
2124    @java.lang.Override
2125    protected Builder newBuilderForType(
2126        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2127      Builder builder = new Builder(parent);
2128      return builder;
2129    }
2130    public static final class Builder extends
2131        com.google.protobuf.GeneratedMessage.Builder<Builder>
2132       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProtoOrBuilder {
2133      public static final com.google.protobuf.Descriptors.Descriptor
2134          getDescriptor() {
2135        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
2136      }
2137      
2138      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2139          internalGetFieldAccessorTable() {
2140        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable;
2141      }
2142      
2143      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.newBuilder()
2144      private Builder() {
2145        maybeForceBuilderInitialization();
2146      }
2147      
2148      private Builder(BuilderParent parent) {
2149        super(parent);
2150        maybeForceBuilderInitialization();
2151      }
2152      private void maybeForceBuilderInitialization() {
2153        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2154          getReqInfoFieldBuilder();
2155        }
2156      }
2157      private static Builder create() {
2158        return new Builder();
2159      }
2160      
2161      public Builder clear() {
2162        super.clear();
2163        if (reqInfoBuilder_ == null) {
2164          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
2165        } else {
2166          reqInfoBuilder_.clear();
2167        }
2168        bitField0_ = (bitField0_ & ~0x00000001);
2169        return this;
2170      }
2171      
2172      public Builder clone() {
2173        return create().mergeFrom(buildPartial());
2174      }
2175      
2176      public com.google.protobuf.Descriptors.Descriptor
2177          getDescriptorForType() {
2178        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDescriptor();
2179      }
2180      
2181      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto getDefaultInstanceForType() {
2182        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
2183      }
2184      
2185      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto build() {
2186        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
2187        if (!result.isInitialized()) {
2188          throw newUninitializedMessageException(result);
2189        }
2190        return result;
2191      }
2192      
2193      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildParsed()
2194          throws com.google.protobuf.InvalidProtocolBufferException {
2195        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
2196        if (!result.isInitialized()) {
2197          throw newUninitializedMessageException(
2198            result).asInvalidProtocolBufferException();
2199        }
2200        return result;
2201      }
2202      
2203      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildPartial() {
2204        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto(this);
2205        int from_bitField0_ = bitField0_;
2206        int to_bitField0_ = 0;
2207        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2208          to_bitField0_ |= 0x00000001;
2209        }
2210        if (reqInfoBuilder_ == null) {
2211          result.reqInfo_ = reqInfo_;
2212        } else {
2213          result.reqInfo_ = reqInfoBuilder_.build();
2214        }
2215        result.bitField0_ = to_bitField0_;
2216        onBuilt();
2217        return result;
2218      }
2219      
2220      public Builder mergeFrom(com.google.protobuf.Message other) {
2221        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) {
2222          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)other);
2223        } else {
2224          super.mergeFrom(other);
2225          return this;
2226        }
2227      }
2228      
2229      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other) {
2230        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance()) return this;
2231        if (other.hasReqInfo()) {
2232          mergeReqInfo(other.getReqInfo());
2233        }
2234        this.mergeUnknownFields(other.getUnknownFields());
2235        return this;
2236      }
2237      
2238      public final boolean isInitialized() {
2239        if (!hasReqInfo()) {
2240          
2241          return false;
2242        }
2243        if (!getReqInfo().isInitialized()) {
2244          
2245          return false;
2246        }
2247        return true;
2248      }
2249      
2250      public Builder mergeFrom(
2251          com.google.protobuf.CodedInputStream input,
2252          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2253          throws java.io.IOException {
2254        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2255          com.google.protobuf.UnknownFieldSet.newBuilder(
2256            this.getUnknownFields());
2257        while (true) {
2258          int tag = input.readTag();
2259          switch (tag) {
2260            case 0:
2261              this.setUnknownFields(unknownFields.build());
2262              onChanged();
2263              return this;
2264            default: {
2265              if (!parseUnknownField(input, unknownFields,
2266                                     extensionRegistry, tag)) {
2267                this.setUnknownFields(unknownFields.build());
2268                onChanged();
2269                return this;
2270              }
2271              break;
2272            }
2273            case 10: {
2274              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder subBuilder = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder();
2275              if (hasReqInfo()) {
2276                subBuilder.mergeFrom(getReqInfo());
2277              }
2278              input.readMessage(subBuilder, extensionRegistry);
2279              setReqInfo(subBuilder.buildPartial());
2280              break;
2281            }
2282          }
2283        }
2284      }
2285      
2286      private int bitField0_;
2287      
2288      // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
2289      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
2290      private com.google.protobuf.SingleFieldBuilder<
2291          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> reqInfoBuilder_;
2292      public boolean hasReqInfo() {
2293        return ((bitField0_ & 0x00000001) == 0x00000001);
2294      }
2295      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
2296        if (reqInfoBuilder_ == null) {
2297          return reqInfo_;
2298        } else {
2299          return reqInfoBuilder_.getMessage();
2300        }
2301      }
2302      public Builder setReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
2303        if (reqInfoBuilder_ == null) {
2304          if (value == null) {
2305            throw new NullPointerException();
2306          }
2307          reqInfo_ = value;
2308          onChanged();
2309        } else {
2310          reqInfoBuilder_.setMessage(value);
2311        }
2312        bitField0_ |= 0x00000001;
2313        return this;
2314      }
2315      public Builder setReqInfo(
2316          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder builderForValue) {
2317        if (reqInfoBuilder_ == null) {
2318          reqInfo_ = builderForValue.build();
2319          onChanged();
2320        } else {
2321          reqInfoBuilder_.setMessage(builderForValue.build());
2322        }
2323        bitField0_ |= 0x00000001;
2324        return this;
2325      }
2326      public Builder mergeReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
2327        if (reqInfoBuilder_ == null) {
2328          if (((bitField0_ & 0x00000001) == 0x00000001) &&
2329              reqInfo_ != org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) {
2330            reqInfo_ =
2331              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder(reqInfo_).mergeFrom(value).buildPartial();
2332          } else {
2333            reqInfo_ = value;
2334          }
2335          onChanged();
2336        } else {
2337          reqInfoBuilder_.mergeFrom(value);
2338        }
2339        bitField0_ |= 0x00000001;
2340        return this;
2341      }
2342      public Builder clearReqInfo() {
2343        if (reqInfoBuilder_ == null) {
2344          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
2345          onChanged();
2346        } else {
2347          reqInfoBuilder_.clear();
2348        }
2349        bitField0_ = (bitField0_ & ~0x00000001);
2350        return this;
2351      }
2352      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder getReqInfoBuilder() {
2353        bitField0_ |= 0x00000001;
2354        onChanged();
2355        return getReqInfoFieldBuilder().getBuilder();
2356      }
2357      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
2358        if (reqInfoBuilder_ != null) {
2359          return reqInfoBuilder_.getMessageOrBuilder();
2360        } else {
2361          return reqInfo_;
2362        }
2363      }
2364      private com.google.protobuf.SingleFieldBuilder<
2365          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> 
2366          getReqInfoFieldBuilder() {
2367        if (reqInfoBuilder_ == null) {
2368          reqInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
2369              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder>(
2370                  reqInfo_,
2371                  getParentForChildren(),
2372                  isClean());
2373          reqInfo_ = null;
2374        }
2375        return reqInfoBuilder_;
2376      }
2377      
2378      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToStandbyRequestProto)
2379    }
2380    
2381    static {
2382      defaultInstance = new TransitionToStandbyRequestProto(true);
2383      defaultInstance.initFields();
2384    }
2385    
2386    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToStandbyRequestProto)
2387  }
2388  
2389  public interface TransitionToStandbyResponseProtoOrBuilder
2390      extends com.google.protobuf.MessageOrBuilder {
2391  }
2392  public static final class TransitionToStandbyResponseProto extends
2393      com.google.protobuf.GeneratedMessage
2394      implements TransitionToStandbyResponseProtoOrBuilder {
2395    // Use TransitionToStandbyResponseProto.newBuilder() to construct.
2396    private TransitionToStandbyResponseProto(Builder builder) {
2397      super(builder);
2398    }
2399    private TransitionToStandbyResponseProto(boolean noInit) {}
2400    
2401    private static final TransitionToStandbyResponseProto defaultInstance;
2402    public static TransitionToStandbyResponseProto getDefaultInstance() {
2403      return defaultInstance;
2404    }
2405    
2406    public TransitionToStandbyResponseProto getDefaultInstanceForType() {
2407      return defaultInstance;
2408    }
2409    
2410    public static final com.google.protobuf.Descriptors.Descriptor
2411        getDescriptor() {
2412      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
2413    }
2414    
2415    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2416        internalGetFieldAccessorTable() {
2417      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable;
2418    }
2419    
2420    private void initFields() {
2421    }
2422    private byte memoizedIsInitialized = -1;
2423    public final boolean isInitialized() {
2424      byte isInitialized = memoizedIsInitialized;
2425      if (isInitialized != -1) return isInitialized == 1;
2426      
2427      memoizedIsInitialized = 1;
2428      return true;
2429    }
2430    
2431    public void writeTo(com.google.protobuf.CodedOutputStream output)
2432                        throws java.io.IOException {
2433      getSerializedSize();
2434      getUnknownFields().writeTo(output);
2435    }
2436    
2437    private int memoizedSerializedSize = -1;
2438    public int getSerializedSize() {
2439      int size = memoizedSerializedSize;
2440      if (size != -1) return size;
2441    
2442      size = 0;
2443      size += getUnknownFields().getSerializedSize();
2444      memoizedSerializedSize = size;
2445      return size;
2446    }
2447    
2448    private static final long serialVersionUID = 0L;
2449    @java.lang.Override
2450    protected java.lang.Object writeReplace()
2451        throws java.io.ObjectStreamException {
2452      return super.writeReplace();
2453    }
2454    
2455    @java.lang.Override
2456    public boolean equals(final java.lang.Object obj) {
2457      if (obj == this) {
2458       return true;
2459      }
2460      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)) {
2461        return super.equals(obj);
2462      }
2463      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) obj;
2464      
2465      boolean result = true;
2466      result = result &&
2467          getUnknownFields().equals(other.getUnknownFields());
2468      return result;
2469    }
2470    
2471    @java.lang.Override
2472    public int hashCode() {
2473      int hash = 41;
2474      hash = (19 * hash) + getDescriptorForType().hashCode();
2475      hash = (29 * hash) + getUnknownFields().hashCode();
2476      return hash;
2477    }
2478    
2479    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
2480        com.google.protobuf.ByteString data)
2481        throws com.google.protobuf.InvalidProtocolBufferException {
2482      return newBuilder().mergeFrom(data).buildParsed();
2483    }
2484    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
2485        com.google.protobuf.ByteString data,
2486        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2487        throws com.google.protobuf.InvalidProtocolBufferException {
2488      return newBuilder().mergeFrom(data, extensionRegistry)
2489               .buildParsed();
2490    }
2491    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(byte[] data)
2492        throws com.google.protobuf.InvalidProtocolBufferException {
2493      return newBuilder().mergeFrom(data).buildParsed();
2494    }
2495    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
2496        byte[] data,
2497        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2498        throws com.google.protobuf.InvalidProtocolBufferException {
2499      return newBuilder().mergeFrom(data, extensionRegistry)
2500               .buildParsed();
2501    }
2502    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(java.io.InputStream input)
2503        throws java.io.IOException {
2504      return newBuilder().mergeFrom(input).buildParsed();
2505    }
2506    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
2507        java.io.InputStream input,
2508        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2509        throws java.io.IOException {
2510      return newBuilder().mergeFrom(input, extensionRegistry)
2511               .buildParsed();
2512    }
2513    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(java.io.InputStream input)
2514        throws java.io.IOException {
2515      Builder builder = newBuilder();
2516      if (builder.mergeDelimitedFrom(input)) {
2517        return builder.buildParsed();
2518      } else {
2519        return null;
2520      }
2521    }
2522    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(
2523        java.io.InputStream input,
2524        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2525        throws java.io.IOException {
2526      Builder builder = newBuilder();
2527      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
2528        return builder.buildParsed();
2529      } else {
2530        return null;
2531      }
2532    }
2533    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
2534        com.google.protobuf.CodedInputStream input)
2535        throws java.io.IOException {
2536      return newBuilder().mergeFrom(input).buildParsed();
2537    }
2538    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
2539        com.google.protobuf.CodedInputStream input,
2540        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2541        throws java.io.IOException {
2542      return newBuilder().mergeFrom(input, extensionRegistry)
2543               .buildParsed();
2544    }
2545    
2546    public static Builder newBuilder() { return Builder.create(); }
2547    public Builder newBuilderForType() { return newBuilder(); }
2548    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto prototype) {
2549      return newBuilder().mergeFrom(prototype);
2550    }
2551    public Builder toBuilder() { return newBuilder(this); }
2552    
2553    @java.lang.Override
2554    protected Builder newBuilderForType(
2555        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2556      Builder builder = new Builder(parent);
2557      return builder;
2558    }
2559    public static final class Builder extends
2560        com.google.protobuf.GeneratedMessage.Builder<Builder>
2561       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProtoOrBuilder {
2562      public static final com.google.protobuf.Descriptors.Descriptor
2563          getDescriptor() {
2564        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
2565      }
2566      
2567      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2568          internalGetFieldAccessorTable() {
2569        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable;
2570      }
2571      
2572      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.newBuilder()
2573      private Builder() {
2574        maybeForceBuilderInitialization();
2575      }
2576      
2577      private Builder(BuilderParent parent) {
2578        super(parent);
2579        maybeForceBuilderInitialization();
2580      }
2581      private void maybeForceBuilderInitialization() {
2582        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2583        }
2584      }
2585      private static Builder create() {
2586        return new Builder();
2587      }
2588      
2589      public Builder clear() {
2590        super.clear();
2591        return this;
2592      }
2593      
2594      public Builder clone() {
2595        return create().mergeFrom(buildPartial());
2596      }
2597      
2598      public com.google.protobuf.Descriptors.Descriptor
2599          getDescriptorForType() {
2600        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDescriptor();
2601      }
2602      
2603      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto getDefaultInstanceForType() {
2604        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
2605      }
2606      
2607      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto build() {
2608        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
2609        if (!result.isInitialized()) {
2610          throw newUninitializedMessageException(result);
2611        }
2612        return result;
2613      }
2614      
2615      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildParsed()
2616          throws com.google.protobuf.InvalidProtocolBufferException {
2617        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
2618        if (!result.isInitialized()) {
2619          throw newUninitializedMessageException(
2620            result).asInvalidProtocolBufferException();
2621        }
2622        return result;
2623      }
2624      
2625      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildPartial() {
2626        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto(this);
2627        onBuilt();
2628        return result;
2629      }
2630      
2631      public Builder mergeFrom(com.google.protobuf.Message other) {
2632        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) {
2633          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)other);
2634        } else {
2635          super.mergeFrom(other);
2636          return this;
2637        }
2638      }
2639      
2640      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other) {
2641        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()) return this;
2642        this.mergeUnknownFields(other.getUnknownFields());
2643        return this;
2644      }
2645      
2646      public final boolean isInitialized() {
2647        return true;
2648      }
2649      
2650      public Builder mergeFrom(
2651          com.google.protobuf.CodedInputStream input,
2652          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2653          throws java.io.IOException {
2654        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2655          com.google.protobuf.UnknownFieldSet.newBuilder(
2656            this.getUnknownFields());
2657        while (true) {
2658          int tag = input.readTag();
2659          switch (tag) {
2660            case 0:
2661              this.setUnknownFields(unknownFields.build());
2662              onChanged();
2663              return this;
2664            default: {
2665              if (!parseUnknownField(input, unknownFields,
2666                                     extensionRegistry, tag)) {
2667                this.setUnknownFields(unknownFields.build());
2668                onChanged();
2669                return this;
2670              }
2671              break;
2672            }
2673          }
2674        }
2675      }
2676      
2677      
2678      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToStandbyResponseProto)
2679    }
2680    
2681    static {
2682      defaultInstance = new TransitionToStandbyResponseProto(true);
2683      defaultInstance.initFields();
2684    }
2685    
2686    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToStandbyResponseProto)
2687  }
2688  
2689  public interface GetServiceStatusRequestProtoOrBuilder
2690      extends com.google.protobuf.MessageOrBuilder {
2691  }
2692  public static final class GetServiceStatusRequestProto extends
2693      com.google.protobuf.GeneratedMessage
2694      implements GetServiceStatusRequestProtoOrBuilder {
2695    // Use GetServiceStatusRequestProto.newBuilder() to construct.
2696    private GetServiceStatusRequestProto(Builder builder) {
2697      super(builder);
2698    }
2699    private GetServiceStatusRequestProto(boolean noInit) {}
2700    
2701    private static final GetServiceStatusRequestProto defaultInstance;
2702    public static GetServiceStatusRequestProto getDefaultInstance() {
2703      return defaultInstance;
2704    }
2705    
2706    public GetServiceStatusRequestProto getDefaultInstanceForType() {
2707      return defaultInstance;
2708    }
2709    
2710    public static final com.google.protobuf.Descriptors.Descriptor
2711        getDescriptor() {
2712      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
2713    }
2714    
2715    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2716        internalGetFieldAccessorTable() {
2717      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable;
2718    }
2719    
2720    private void initFields() {
2721    }
2722    private byte memoizedIsInitialized = -1;
2723    public final boolean isInitialized() {
2724      byte isInitialized = memoizedIsInitialized;
2725      if (isInitialized != -1) return isInitialized == 1;
2726      
2727      memoizedIsInitialized = 1;
2728      return true;
2729    }
2730    
2731    public void writeTo(com.google.protobuf.CodedOutputStream output)
2732                        throws java.io.IOException {
2733      getSerializedSize();
2734      getUnknownFields().writeTo(output);
2735    }
2736    
2737    private int memoizedSerializedSize = -1;
2738    public int getSerializedSize() {
2739      int size = memoizedSerializedSize;
2740      if (size != -1) return size;
2741    
2742      size = 0;
2743      size += getUnknownFields().getSerializedSize();
2744      memoizedSerializedSize = size;
2745      return size;
2746    }
2747    
2748    private static final long serialVersionUID = 0L;
2749    @java.lang.Override
2750    protected java.lang.Object writeReplace()
2751        throws java.io.ObjectStreamException {
2752      return super.writeReplace();
2753    }
2754    
2755    @java.lang.Override
2756    public boolean equals(final java.lang.Object obj) {
2757      if (obj == this) {
2758       return true;
2759      }
2760      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)) {
2761        return super.equals(obj);
2762      }
2763      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) obj;
2764      
2765      boolean result = true;
2766      result = result &&
2767          getUnknownFields().equals(other.getUnknownFields());
2768      return result;
2769    }
2770    
2771    @java.lang.Override
2772    public int hashCode() {
2773      int hash = 41;
2774      hash = (19 * hash) + getDescriptorForType().hashCode();
2775      hash = (29 * hash) + getUnknownFields().hashCode();
2776      return hash;
2777    }
2778    
2779    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
2780        com.google.protobuf.ByteString data)
2781        throws com.google.protobuf.InvalidProtocolBufferException {
2782      return newBuilder().mergeFrom(data).buildParsed();
2783    }
2784    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
2785        com.google.protobuf.ByteString data,
2786        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2787        throws com.google.protobuf.InvalidProtocolBufferException {
2788      return newBuilder().mergeFrom(data, extensionRegistry)
2789               .buildParsed();
2790    }
2791    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(byte[] data)
2792        throws com.google.protobuf.InvalidProtocolBufferException {
2793      return newBuilder().mergeFrom(data).buildParsed();
2794    }
2795    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
2796        byte[] data,
2797        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2798        throws com.google.protobuf.InvalidProtocolBufferException {
2799      return newBuilder().mergeFrom(data, extensionRegistry)
2800               .buildParsed();
2801    }
2802    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(java.io.InputStream input)
2803        throws java.io.IOException {
2804      return newBuilder().mergeFrom(input).buildParsed();
2805    }
2806    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
2807        java.io.InputStream input,
2808        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2809        throws java.io.IOException {
2810      return newBuilder().mergeFrom(input, extensionRegistry)
2811               .buildParsed();
2812    }
2813    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(java.io.InputStream input)
2814        throws java.io.IOException {
2815      Builder builder = newBuilder();
2816      if (builder.mergeDelimitedFrom(input)) {
2817        return builder.buildParsed();
2818      } else {
2819        return null;
2820      }
2821    }
2822    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(
2823        java.io.InputStream input,
2824        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2825        throws java.io.IOException {
2826      Builder builder = newBuilder();
2827      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
2828        return builder.buildParsed();
2829      } else {
2830        return null;
2831      }
2832    }
2833    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
2834        com.google.protobuf.CodedInputStream input)
2835        throws java.io.IOException {
2836      return newBuilder().mergeFrom(input).buildParsed();
2837    }
2838    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
2839        com.google.protobuf.CodedInputStream input,
2840        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2841        throws java.io.IOException {
2842      return newBuilder().mergeFrom(input, extensionRegistry)
2843               .buildParsed();
2844    }
2845    
2846    public static Builder newBuilder() { return Builder.create(); }
2847    public Builder newBuilderForType() { return newBuilder(); }
2848    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto prototype) {
2849      return newBuilder().mergeFrom(prototype);
2850    }
2851    public Builder toBuilder() { return newBuilder(this); }
2852    
2853    @java.lang.Override
2854    protected Builder newBuilderForType(
2855        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2856      Builder builder = new Builder(parent);
2857      return builder;
2858    }
2859    public static final class Builder extends
2860        com.google.protobuf.GeneratedMessage.Builder<Builder>
2861       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProtoOrBuilder {
2862      public static final com.google.protobuf.Descriptors.Descriptor
2863          getDescriptor() {
2864        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
2865      }
2866      
2867      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2868          internalGetFieldAccessorTable() {
2869        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable;
2870      }
2871      
2872      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.newBuilder()
2873      private Builder() {
2874        maybeForceBuilderInitialization();
2875      }
2876      
2877      private Builder(BuilderParent parent) {
2878        super(parent);
2879        maybeForceBuilderInitialization();
2880      }
2881      private void maybeForceBuilderInitialization() {
2882        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2883        }
2884      }
2885      private static Builder create() {
2886        return new Builder();
2887      }
2888      
2889      public Builder clear() {
2890        super.clear();
2891        return this;
2892      }
2893      
2894      public Builder clone() {
2895        return create().mergeFrom(buildPartial());
2896      }
2897      
2898      public com.google.protobuf.Descriptors.Descriptor
2899          getDescriptorForType() {
2900        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDescriptor();
2901      }
2902      
2903      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto getDefaultInstanceForType() {
2904        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
2905      }
2906      
2907      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto build() {
2908        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
2909        if (!result.isInitialized()) {
2910          throw newUninitializedMessageException(result);
2911        }
2912        return result;
2913      }
2914      
2915      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildParsed()
2916          throws com.google.protobuf.InvalidProtocolBufferException {
2917        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
2918        if (!result.isInitialized()) {
2919          throw newUninitializedMessageException(
2920            result).asInvalidProtocolBufferException();
2921        }
2922        return result;
2923      }
2924      
2925      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildPartial() {
2926        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto(this);
2927        onBuilt();
2928        return result;
2929      }
2930      
2931      public Builder mergeFrom(com.google.protobuf.Message other) {
2932        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) {
2933          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)other);
2934        } else {
2935          super.mergeFrom(other);
2936          return this;
2937        }
2938      }
2939      
2940      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other) {
2941        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance()) return this;
2942        this.mergeUnknownFields(other.getUnknownFields());
2943        return this;
2944      }
2945      
2946      public final boolean isInitialized() {
2947        return true;
2948      }
2949      
2950      public Builder mergeFrom(
2951          com.google.protobuf.CodedInputStream input,
2952          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2953          throws java.io.IOException {
2954        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2955          com.google.protobuf.UnknownFieldSet.newBuilder(
2956            this.getUnknownFields());
2957        while (true) {
2958          int tag = input.readTag();
2959          switch (tag) {
2960            case 0:
2961              this.setUnknownFields(unknownFields.build());
2962              onChanged();
2963              return this;
2964            default: {
2965              if (!parseUnknownField(input, unknownFields,
2966                                     extensionRegistry, tag)) {
2967                this.setUnknownFields(unknownFields.build());
2968                onChanged();
2969                return this;
2970              }
2971              break;
2972            }
2973          }
2974        }
2975      }
2976      
2977      
2978      // @@protoc_insertion_point(builder_scope:hadoop.common.GetServiceStatusRequestProto)
2979    }
2980    
2981    static {
2982      defaultInstance = new GetServiceStatusRequestProto(true);
2983      defaultInstance.initFields();
2984    }
2985    
2986    // @@protoc_insertion_point(class_scope:hadoop.common.GetServiceStatusRequestProto)
2987  }
2988  
2989  public interface GetServiceStatusResponseProtoOrBuilder
2990      extends com.google.protobuf.MessageOrBuilder {
2991    
2992    // required .hadoop.common.HAServiceStateProto state = 1;
2993    boolean hasState();
2994    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState();
2995    
2996    // optional bool readyToBecomeActive = 2;
2997    boolean hasReadyToBecomeActive();
2998    boolean getReadyToBecomeActive();
2999    
3000    // optional string notReadyReason = 3;
3001    boolean hasNotReadyReason();
3002    String getNotReadyReason();
3003  }
3004  public static final class GetServiceStatusResponseProto extends
3005      com.google.protobuf.GeneratedMessage
3006      implements GetServiceStatusResponseProtoOrBuilder {
3007    // Use GetServiceStatusResponseProto.newBuilder() to construct.
3008    private GetServiceStatusResponseProto(Builder builder) {
3009      super(builder);
3010    }
3011    private GetServiceStatusResponseProto(boolean noInit) {}
3012    
3013    private static final GetServiceStatusResponseProto defaultInstance;
3014    public static GetServiceStatusResponseProto getDefaultInstance() {
3015      return defaultInstance;
3016    }
3017    
3018    public GetServiceStatusResponseProto getDefaultInstanceForType() {
3019      return defaultInstance;
3020    }
3021    
3022    public static final com.google.protobuf.Descriptors.Descriptor
3023        getDescriptor() {
3024      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
3025    }
3026    
3027    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3028        internalGetFieldAccessorTable() {
3029      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable;
3030    }
3031    
3032    private int bitField0_;
3033    // required .hadoop.common.HAServiceStateProto state = 1;
3034    public static final int STATE_FIELD_NUMBER = 1;
3035    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_;
3036    public boolean hasState() {
3037      return ((bitField0_ & 0x00000001) == 0x00000001);
3038    }
3039    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
3040      return state_;
3041    }
3042    
3043    // optional bool readyToBecomeActive = 2;
3044    public static final int READYTOBECOMEACTIVE_FIELD_NUMBER = 2;
3045    private boolean readyToBecomeActive_;
3046    public boolean hasReadyToBecomeActive() {
3047      return ((bitField0_ & 0x00000002) == 0x00000002);
3048    }
3049    public boolean getReadyToBecomeActive() {
3050      return readyToBecomeActive_;
3051    }
3052    
3053    // optional string notReadyReason = 3;
3054    public static final int NOTREADYREASON_FIELD_NUMBER = 3;
3055    private java.lang.Object notReadyReason_;
3056    public boolean hasNotReadyReason() {
3057      return ((bitField0_ & 0x00000004) == 0x00000004);
3058    }
3059    public String getNotReadyReason() {
3060      java.lang.Object ref = notReadyReason_;
3061      if (ref instanceof String) {
3062        return (String) ref;
3063      } else {
3064        com.google.protobuf.ByteString bs = 
3065            (com.google.protobuf.ByteString) ref;
3066        String s = bs.toStringUtf8();
3067        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
3068          notReadyReason_ = s;
3069        }
3070        return s;
3071      }
3072    }
3073    private com.google.protobuf.ByteString getNotReadyReasonBytes() {
3074      java.lang.Object ref = notReadyReason_;
3075      if (ref instanceof String) {
3076        com.google.protobuf.ByteString b = 
3077            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
3078        notReadyReason_ = b;
3079        return b;
3080      } else {
3081        return (com.google.protobuf.ByteString) ref;
3082      }
3083    }
3084    
3085    private void initFields() {
3086      state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
3087      readyToBecomeActive_ = false;
3088      notReadyReason_ = "";
3089    }
3090    private byte memoizedIsInitialized = -1;
3091    public final boolean isInitialized() {
3092      byte isInitialized = memoizedIsInitialized;
3093      if (isInitialized != -1) return isInitialized == 1;
3094      
3095      if (!hasState()) {
3096        memoizedIsInitialized = 0;
3097        return false;
3098      }
3099      memoizedIsInitialized = 1;
3100      return true;
3101    }
3102    
3103    public void writeTo(com.google.protobuf.CodedOutputStream output)
3104                        throws java.io.IOException {
3105      getSerializedSize();
3106      if (((bitField0_ & 0x00000001) == 0x00000001)) {
3107        output.writeEnum(1, state_.getNumber());
3108      }
3109      if (((bitField0_ & 0x00000002) == 0x00000002)) {
3110        output.writeBool(2, readyToBecomeActive_);
3111      }
3112      if (((bitField0_ & 0x00000004) == 0x00000004)) {
3113        output.writeBytes(3, getNotReadyReasonBytes());
3114      }
3115      getUnknownFields().writeTo(output);
3116    }
3117    
3118    private int memoizedSerializedSize = -1;
3119    public int getSerializedSize() {
3120      int size = memoizedSerializedSize;
3121      if (size != -1) return size;
3122    
3123      size = 0;
3124      if (((bitField0_ & 0x00000001) == 0x00000001)) {
3125        size += com.google.protobuf.CodedOutputStream
3126          .computeEnumSize(1, state_.getNumber());
3127      }
3128      if (((bitField0_ & 0x00000002) == 0x00000002)) {
3129        size += com.google.protobuf.CodedOutputStream
3130          .computeBoolSize(2, readyToBecomeActive_);
3131      }
3132      if (((bitField0_ & 0x00000004) == 0x00000004)) {
3133        size += com.google.protobuf.CodedOutputStream
3134          .computeBytesSize(3, getNotReadyReasonBytes());
3135      }
3136      size += getUnknownFields().getSerializedSize();
3137      memoizedSerializedSize = size;
3138      return size;
3139    }
3140    
3141    private static final long serialVersionUID = 0L;
3142    @java.lang.Override
3143    protected java.lang.Object writeReplace()
3144        throws java.io.ObjectStreamException {
3145      return super.writeReplace();
3146    }
3147    
3148    @java.lang.Override
3149    public boolean equals(final java.lang.Object obj) {
3150      if (obj == this) {
3151       return true;
3152      }
3153      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)) {
3154        return super.equals(obj);
3155      }
3156      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) obj;
3157      
3158      boolean result = true;
3159      result = result && (hasState() == other.hasState());
3160      if (hasState()) {
3161        result = result &&
3162            (getState() == other.getState());
3163      }
3164      result = result && (hasReadyToBecomeActive() == other.hasReadyToBecomeActive());
3165      if (hasReadyToBecomeActive()) {
3166        result = result && (getReadyToBecomeActive()
3167            == other.getReadyToBecomeActive());
3168      }
3169      result = result && (hasNotReadyReason() == other.hasNotReadyReason());
3170      if (hasNotReadyReason()) {
3171        result = result && getNotReadyReason()
3172            .equals(other.getNotReadyReason());
3173      }
3174      result = result &&
3175          getUnknownFields().equals(other.getUnknownFields());
3176      return result;
3177    }
3178    
3179    @java.lang.Override
3180    public int hashCode() {
3181      int hash = 41;
3182      hash = (19 * hash) + getDescriptorForType().hashCode();
3183      if (hasState()) {
3184        hash = (37 * hash) + STATE_FIELD_NUMBER;
3185        hash = (53 * hash) + hashEnum(getState());
3186      }
3187      if (hasReadyToBecomeActive()) {
3188        hash = (37 * hash) + READYTOBECOMEACTIVE_FIELD_NUMBER;
3189        hash = (53 * hash) + hashBoolean(getReadyToBecomeActive());
3190      }
3191      if (hasNotReadyReason()) {
3192        hash = (37 * hash) + NOTREADYREASON_FIELD_NUMBER;
3193        hash = (53 * hash) + getNotReadyReason().hashCode();
3194      }
3195      hash = (29 * hash) + getUnknownFields().hashCode();
3196      return hash;
3197    }
3198    
3199    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3200        com.google.protobuf.ByteString data)
3201        throws com.google.protobuf.InvalidProtocolBufferException {
3202      return newBuilder().mergeFrom(data).buildParsed();
3203    }
3204    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3205        com.google.protobuf.ByteString data,
3206        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3207        throws com.google.protobuf.InvalidProtocolBufferException {
3208      return newBuilder().mergeFrom(data, extensionRegistry)
3209               .buildParsed();
3210    }
3211    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(byte[] data)
3212        throws com.google.protobuf.InvalidProtocolBufferException {
3213      return newBuilder().mergeFrom(data).buildParsed();
3214    }
3215    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3216        byte[] data,
3217        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3218        throws com.google.protobuf.InvalidProtocolBufferException {
3219      return newBuilder().mergeFrom(data, extensionRegistry)
3220               .buildParsed();
3221    }
3222    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(java.io.InputStream input)
3223        throws java.io.IOException {
3224      return newBuilder().mergeFrom(input).buildParsed();
3225    }
3226    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3227        java.io.InputStream input,
3228        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3229        throws java.io.IOException {
3230      return newBuilder().mergeFrom(input, extensionRegistry)
3231               .buildParsed();
3232    }
3233    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(java.io.InputStream input)
3234        throws java.io.IOException {
3235      Builder builder = newBuilder();
3236      if (builder.mergeDelimitedFrom(input)) {
3237        return builder.buildParsed();
3238      } else {
3239        return null;
3240      }
3241    }
3242    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(
3243        java.io.InputStream input,
3244        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3245        throws java.io.IOException {
3246      Builder builder = newBuilder();
3247      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
3248        return builder.buildParsed();
3249      } else {
3250        return null;
3251      }
3252    }
3253    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3254        com.google.protobuf.CodedInputStream input)
3255        throws java.io.IOException {
3256      return newBuilder().mergeFrom(input).buildParsed();
3257    }
3258    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3259        com.google.protobuf.CodedInputStream input,
3260        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3261        throws java.io.IOException {
3262      return newBuilder().mergeFrom(input, extensionRegistry)
3263               .buildParsed();
3264    }
3265    
3266    public static Builder newBuilder() { return Builder.create(); }
3267    public Builder newBuilderForType() { return newBuilder(); }
3268    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto prototype) {
3269      return newBuilder().mergeFrom(prototype);
3270    }
3271    public Builder toBuilder() { return newBuilder(this); }
3272    
3273    @java.lang.Override
3274    protected Builder newBuilderForType(
3275        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3276      Builder builder = new Builder(parent);
3277      return builder;
3278    }
3279    public static final class Builder extends
3280        com.google.protobuf.GeneratedMessage.Builder<Builder>
3281       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProtoOrBuilder {
3282      public static final com.google.protobuf.Descriptors.Descriptor
3283          getDescriptor() {
3284        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
3285      }
3286      
3287      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3288          internalGetFieldAccessorTable() {
3289        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable;
3290      }
3291      
3292      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.newBuilder()
3293      private Builder() {
3294        maybeForceBuilderInitialization();
3295      }
3296      
3297      private Builder(BuilderParent parent) {
3298        super(parent);
3299        maybeForceBuilderInitialization();
3300      }
3301      private void maybeForceBuilderInitialization() {
3302        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3303        }
3304      }
3305      private static Builder create() {
3306        return new Builder();
3307      }
3308      
3309      public Builder clear() {
3310        super.clear();
3311        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
3312        bitField0_ = (bitField0_ & ~0x00000001);
3313        readyToBecomeActive_ = false;
3314        bitField0_ = (bitField0_ & ~0x00000002);
3315        notReadyReason_ = "";
3316        bitField0_ = (bitField0_ & ~0x00000004);
3317        return this;
3318      }
3319      
3320      public Builder clone() {
3321        return create().mergeFrom(buildPartial());
3322      }
3323      
3324      public com.google.protobuf.Descriptors.Descriptor
3325          getDescriptorForType() {
3326        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDescriptor();
3327      }
3328      
3329      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getDefaultInstanceForType() {
3330        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
3331      }
3332      
3333      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto build() {
3334        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
3335        if (!result.isInitialized()) {
3336          throw newUninitializedMessageException(result);
3337        }
3338        return result;
3339      }
3340      
3341      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildParsed()
3342          throws com.google.protobuf.InvalidProtocolBufferException {
3343        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
3344        if (!result.isInitialized()) {
3345          throw newUninitializedMessageException(
3346            result).asInvalidProtocolBufferException();
3347        }
3348        return result;
3349      }
3350      
3351      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildPartial() {
3352        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto(this);
3353        int from_bitField0_ = bitField0_;
3354        int to_bitField0_ = 0;
3355        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3356          to_bitField0_ |= 0x00000001;
3357        }
3358        result.state_ = state_;
3359        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
3360          to_bitField0_ |= 0x00000002;
3361        }
3362        result.readyToBecomeActive_ = readyToBecomeActive_;
3363        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
3364          to_bitField0_ |= 0x00000004;
3365        }
3366        result.notReadyReason_ = notReadyReason_;
3367        result.bitField0_ = to_bitField0_;
3368        onBuilt();
3369        return result;
3370      }
3371      
3372      public Builder mergeFrom(com.google.protobuf.Message other) {
3373        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) {
3374          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)other);
3375        } else {
3376          super.mergeFrom(other);
3377          return this;
3378        }
3379      }
3380      
3381      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other) {
3382        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()) return this;
3383        if (other.hasState()) {
3384          setState(other.getState());
3385        }
3386        if (other.hasReadyToBecomeActive()) {
3387          setReadyToBecomeActive(other.getReadyToBecomeActive());
3388        }
3389        if (other.hasNotReadyReason()) {
3390          setNotReadyReason(other.getNotReadyReason());
3391        }
3392        this.mergeUnknownFields(other.getUnknownFields());
3393        return this;
3394      }
3395      
3396      public final boolean isInitialized() {
3397        if (!hasState()) {
3398          
3399          return false;
3400        }
3401        return true;
3402      }
3403      
3404      public Builder mergeFrom(
3405          com.google.protobuf.CodedInputStream input,
3406          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3407          throws java.io.IOException {
3408        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3409          com.google.protobuf.UnknownFieldSet.newBuilder(
3410            this.getUnknownFields());
3411        while (true) {
3412          int tag = input.readTag();
3413          switch (tag) {
3414            case 0:
3415              this.setUnknownFields(unknownFields.build());
3416              onChanged();
3417              return this;
3418            default: {
3419              if (!parseUnknownField(input, unknownFields,
3420                                     extensionRegistry, tag)) {
3421                this.setUnknownFields(unknownFields.build());
3422                onChanged();
3423                return this;
3424              }
3425              break;
3426            }
3427            case 8: {
3428              int rawValue = input.readEnum();
3429              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.valueOf(rawValue);
3430              if (value == null) {
3431                unknownFields.mergeVarintField(1, rawValue);
3432              } else {
3433                bitField0_ |= 0x00000001;
3434                state_ = value;
3435              }
3436              break;
3437            }
3438            case 16: {
3439              bitField0_ |= 0x00000002;
3440              readyToBecomeActive_ = input.readBool();
3441              break;
3442            }
3443            case 26: {
3444              bitField0_ |= 0x00000004;
3445              notReadyReason_ = input.readBytes();
3446              break;
3447            }
3448          }
3449        }
3450      }
3451      
3452      private int bitField0_;
3453      
3454      // required .hadoop.common.HAServiceStateProto state = 1;
3455      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
3456      public boolean hasState() {
3457        return ((bitField0_ & 0x00000001) == 0x00000001);
3458      }
3459      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
3460        return state_;
3461      }
3462      public Builder setState(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value) {
3463        if (value == null) {
3464          throw new NullPointerException();
3465        }
3466        bitField0_ |= 0x00000001;
3467        state_ = value;
3468        onChanged();
3469        return this;
3470      }
3471      public Builder clearState() {
3472        bitField0_ = (bitField0_ & ~0x00000001);
3473        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
3474        onChanged();
3475        return this;
3476      }
3477      
3478      // optional bool readyToBecomeActive = 2;
3479      private boolean readyToBecomeActive_ ;
3480      public boolean hasReadyToBecomeActive() {
3481        return ((bitField0_ & 0x00000002) == 0x00000002);
3482      }
3483      public boolean getReadyToBecomeActive() {
3484        return readyToBecomeActive_;
3485      }
3486      public Builder setReadyToBecomeActive(boolean value) {
3487        bitField0_ |= 0x00000002;
3488        readyToBecomeActive_ = value;
3489        onChanged();
3490        return this;
3491      }
3492      public Builder clearReadyToBecomeActive() {
3493        bitField0_ = (bitField0_ & ~0x00000002);
3494        readyToBecomeActive_ = false;
3495        onChanged();
3496        return this;
3497      }
3498      
3499      // optional string notReadyReason = 3;
3500      private java.lang.Object notReadyReason_ = "";
3501      public boolean hasNotReadyReason() {
3502        return ((bitField0_ & 0x00000004) == 0x00000004);
3503      }
3504      public String getNotReadyReason() {
3505        java.lang.Object ref = notReadyReason_;
3506        if (!(ref instanceof String)) {
3507          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
3508          notReadyReason_ = s;
3509          return s;
3510        } else {
3511          return (String) ref;
3512        }
3513      }
3514      public Builder setNotReadyReason(String value) {
3515        if (value == null) {
3516    throw new NullPointerException();
3517  }
3518  bitField0_ |= 0x00000004;
3519        notReadyReason_ = value;
3520        onChanged();
3521        return this;
3522      }
3523      public Builder clearNotReadyReason() {
3524        bitField0_ = (bitField0_ & ~0x00000004);
3525        notReadyReason_ = getDefaultInstance().getNotReadyReason();
3526        onChanged();
3527        return this;
3528      }
3529      void setNotReadyReason(com.google.protobuf.ByteString value) {
3530        bitField0_ |= 0x00000004;
3531        notReadyReason_ = value;
3532        onChanged();
3533      }
3534      
3535      // @@protoc_insertion_point(builder_scope:hadoop.common.GetServiceStatusResponseProto)
3536    }
3537    
3538    static {
3539      defaultInstance = new GetServiceStatusResponseProto(true);
3540      defaultInstance.initFields();
3541    }
3542    
3543    // @@protoc_insertion_point(class_scope:hadoop.common.GetServiceStatusResponseProto)
3544  }
3545  
3546  public static abstract class HAServiceProtocolService
3547      implements com.google.protobuf.Service {
3548    protected HAServiceProtocolService() {}
3549    
3550    public interface Interface {
3551      public abstract void monitorHealth(
3552          com.google.protobuf.RpcController controller,
3553          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
3554          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
3555      
3556      public abstract void transitionToActive(
3557          com.google.protobuf.RpcController controller,
3558          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
3559          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
3560      
3561      public abstract void transitionToStandby(
3562          com.google.protobuf.RpcController controller,
3563          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
3564          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
3565      
3566      public abstract void getServiceStatus(
3567          com.google.protobuf.RpcController controller,
3568          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
3569          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
3570      
3571    }
3572    
3573    public static com.google.protobuf.Service newReflectiveService(
3574        final Interface impl) {
3575      return new HAServiceProtocolService() {
3576        @java.lang.Override
3577        public  void monitorHealth(
3578            com.google.protobuf.RpcController controller,
3579            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
3580            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
3581          impl.monitorHealth(controller, request, done);
3582        }
3583        
3584        @java.lang.Override
3585        public  void transitionToActive(
3586            com.google.protobuf.RpcController controller,
3587            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
3588            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
3589          impl.transitionToActive(controller, request, done);
3590        }
3591        
3592        @java.lang.Override
3593        public  void transitionToStandby(
3594            com.google.protobuf.RpcController controller,
3595            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
3596            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
3597          impl.transitionToStandby(controller, request, done);
3598        }
3599        
3600        @java.lang.Override
3601        public  void getServiceStatus(
3602            com.google.protobuf.RpcController controller,
3603            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
3604            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
3605          impl.getServiceStatus(controller, request, done);
3606        }
3607        
3608      };
3609    }
3610    
3611    public static com.google.protobuf.BlockingService
3612        newReflectiveBlockingService(final BlockingInterface impl) {
3613      return new com.google.protobuf.BlockingService() {
3614        public final com.google.protobuf.Descriptors.ServiceDescriptor
3615            getDescriptorForType() {
3616          return getDescriptor();
3617        }
3618        
3619        public final com.google.protobuf.Message callBlockingMethod(
3620            com.google.protobuf.Descriptors.MethodDescriptor method,
3621            com.google.protobuf.RpcController controller,
3622            com.google.protobuf.Message request)
3623            throws com.google.protobuf.ServiceException {
3624          if (method.getService() != getDescriptor()) {
3625            throw new java.lang.IllegalArgumentException(
3626              "Service.callBlockingMethod() given method descriptor for " +
3627              "wrong service type.");
3628          }
3629          switch(method.getIndex()) {
3630            case 0:
3631              return impl.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request);
3632            case 1:
3633              return impl.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request);
3634            case 2:
3635              return impl.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request);
3636            case 3:
3637              return impl.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request);
3638            default:
3639              throw new java.lang.AssertionError("Can't get here.");
3640          }
3641        }
3642        
3643        public final com.google.protobuf.Message
3644            getRequestPrototype(
3645            com.google.protobuf.Descriptors.MethodDescriptor method) {
3646          if (method.getService() != getDescriptor()) {
3647            throw new java.lang.IllegalArgumentException(
3648              "Service.getRequestPrototype() given method " +
3649              "descriptor for wrong service type.");
3650          }
3651          switch(method.getIndex()) {
3652            case 0:
3653              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
3654            case 1:
3655              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
3656            case 2:
3657              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
3658            case 3:
3659              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
3660            default:
3661              throw new java.lang.AssertionError("Can't get here.");
3662          }
3663        }
3664        
3665        public final com.google.protobuf.Message
3666            getResponsePrototype(
3667            com.google.protobuf.Descriptors.MethodDescriptor method) {
3668          if (method.getService() != getDescriptor()) {
3669            throw new java.lang.IllegalArgumentException(
3670              "Service.getResponsePrototype() given method " +
3671              "descriptor for wrong service type.");
3672          }
3673          switch(method.getIndex()) {
3674            case 0:
3675              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
3676            case 1:
3677              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
3678            case 2:
3679              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
3680            case 3:
3681              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
3682            default:
3683              throw new java.lang.AssertionError("Can't get here.");
3684          }
3685        }
3686        
3687      };
3688    }
3689    
3690    public abstract void monitorHealth(
3691        com.google.protobuf.RpcController controller,
3692        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
3693        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
3694    
3695    public abstract void transitionToActive(
3696        com.google.protobuf.RpcController controller,
3697        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
3698        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
3699    
3700    public abstract void transitionToStandby(
3701        com.google.protobuf.RpcController controller,
3702        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
3703        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
3704    
3705    public abstract void getServiceStatus(
3706        com.google.protobuf.RpcController controller,
3707        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
3708        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
3709    
3710    public static final
3711        com.google.protobuf.Descriptors.ServiceDescriptor
3712        getDescriptor() {
3713      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getServices().get(0);
3714    }
3715    public final com.google.protobuf.Descriptors.ServiceDescriptor
3716        getDescriptorForType() {
3717      return getDescriptor();
3718    }
3719    
3720    public final void callMethod(
3721        com.google.protobuf.Descriptors.MethodDescriptor method,
3722        com.google.protobuf.RpcController controller,
3723        com.google.protobuf.Message request,
3724        com.google.protobuf.RpcCallback<
3725          com.google.protobuf.Message> done) {
3726      if (method.getService() != getDescriptor()) {
3727        throw new java.lang.IllegalArgumentException(
3728          "Service.callMethod() given method descriptor for wrong " +
3729          "service type.");
3730      }
3731      switch(method.getIndex()) {
3732        case 0:
3733          this.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request,
3734            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto>specializeCallback(
3735              done));
3736          return;
3737        case 1:
3738          this.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request,
3739            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto>specializeCallback(
3740              done));
3741          return;
3742        case 2:
3743          this.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request,
3744            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto>specializeCallback(
3745              done));
3746          return;
3747        case 3:
3748          this.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request,
3749            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto>specializeCallback(
3750              done));
3751          return;
3752        default:
3753          throw new java.lang.AssertionError("Can't get here.");
3754      }
3755    }
3756    
3757    public final com.google.protobuf.Message
3758        getRequestPrototype(
3759        com.google.protobuf.Descriptors.MethodDescriptor method) {
3760      if (method.getService() != getDescriptor()) {
3761        throw new java.lang.IllegalArgumentException(
3762          "Service.getRequestPrototype() given method " +
3763          "descriptor for wrong service type.");
3764      }
3765      switch(method.getIndex()) {
3766        case 0:
3767          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
3768        case 1:
3769          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
3770        case 2:
3771          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
3772        case 3:
3773          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
3774        default:
3775          throw new java.lang.AssertionError("Can't get here.");
3776      }
3777    }
3778    
3779    public final com.google.protobuf.Message
3780        getResponsePrototype(
3781        com.google.protobuf.Descriptors.MethodDescriptor method) {
3782      if (method.getService() != getDescriptor()) {
3783        throw new java.lang.IllegalArgumentException(
3784          "Service.getResponsePrototype() given method " +
3785          "descriptor for wrong service type.");
3786      }
3787      switch(method.getIndex()) {
3788        case 0:
3789          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
3790        case 1:
3791          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
3792        case 2:
3793          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
3794        case 3:
3795          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
3796        default:
3797          throw new java.lang.AssertionError("Can't get here.");
3798      }
3799    }
3800    
3801    public static Stub newStub(
3802        com.google.protobuf.RpcChannel channel) {
3803      return new Stub(channel);
3804    }
3805    
3806    public static final class Stub extends org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceProtocolService implements Interface {
3807      private Stub(com.google.protobuf.RpcChannel channel) {
3808        this.channel = channel;
3809      }
3810      
3811      private final com.google.protobuf.RpcChannel channel;
3812      
3813      public com.google.protobuf.RpcChannel getChannel() {
3814        return channel;
3815      }
3816      
3817      public  void monitorHealth(
3818          com.google.protobuf.RpcController controller,
3819          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
3820          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
3821        channel.callMethod(
3822          getDescriptor().getMethods().get(0),
3823          controller,
3824          request,
3825          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance(),
3826          com.google.protobuf.RpcUtil.generalizeCallback(
3827            done,
3828            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
3829            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()));
3830      }
3831      
3832      public  void transitionToActive(
3833          com.google.protobuf.RpcController controller,
3834          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
3835          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
3836        channel.callMethod(
3837          getDescriptor().getMethods().get(1),
3838          controller,
3839          request,
3840          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance(),
3841          com.google.protobuf.RpcUtil.generalizeCallback(
3842            done,
3843            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
3844            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()));
3845      }
3846      
3847      public  void transitionToStandby(
3848          com.google.protobuf.RpcController controller,
3849          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
3850          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
3851        channel.callMethod(
3852          getDescriptor().getMethods().get(2),
3853          controller,
3854          request,
3855          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance(),
3856          com.google.protobuf.RpcUtil.generalizeCallback(
3857            done,
3858            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
3859            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()));
3860      }
3861      
3862      public  void getServiceStatus(
3863          com.google.protobuf.RpcController controller,
3864          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
3865          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
3866        channel.callMethod(
3867          getDescriptor().getMethods().get(3),
3868          controller,
3869          request,
3870          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance(),
3871          com.google.protobuf.RpcUtil.generalizeCallback(
3872            done,
3873            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
3874            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()));
3875      }
3876    }
3877    
3878    public static BlockingInterface newBlockingStub(
3879        com.google.protobuf.BlockingRpcChannel channel) {
3880      return new BlockingStub(channel);
3881    }
3882    
3883    public interface BlockingInterface {
3884      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
3885          com.google.protobuf.RpcController controller,
3886          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
3887          throws com.google.protobuf.ServiceException;
3888      
3889      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
3890          com.google.protobuf.RpcController controller,
3891          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
3892          throws com.google.protobuf.ServiceException;
3893      
3894      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
3895          com.google.protobuf.RpcController controller,
3896          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
3897          throws com.google.protobuf.ServiceException;
3898      
3899      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
3900          com.google.protobuf.RpcController controller,
3901          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
3902          throws com.google.protobuf.ServiceException;
3903    }
3904    
3905    private static final class BlockingStub implements BlockingInterface {
3906      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
3907        this.channel = channel;
3908      }
3909      
3910      private final com.google.protobuf.BlockingRpcChannel channel;
3911      
3912      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
3913          com.google.protobuf.RpcController controller,
3914          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
3915          throws com.google.protobuf.ServiceException {
3916        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) channel.callBlockingMethod(
3917          getDescriptor().getMethods().get(0),
3918          controller,
3919          request,
3920          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance());
3921      }
3922      
3923      
3924      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
3925          com.google.protobuf.RpcController controller,
3926          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
3927          throws com.google.protobuf.ServiceException {
3928        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) channel.callBlockingMethod(
3929          getDescriptor().getMethods().get(1),
3930          controller,
3931          request,
3932          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance());
3933      }
3934      
3935      
3936      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
3937          com.google.protobuf.RpcController controller,
3938          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
3939          throws com.google.protobuf.ServiceException {
3940        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) channel.callBlockingMethod(
3941          getDescriptor().getMethods().get(2),
3942          controller,
3943          request,
3944          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance());
3945      }
3946      
3947      
3948      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
3949          com.google.protobuf.RpcController controller,
3950          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
3951          throws com.google.protobuf.ServiceException {
3952        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) channel.callBlockingMethod(
3953          getDescriptor().getMethods().get(3),
3954          controller,
3955          request,
3956          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance());
3957      }
3958      
3959    }
3960  }
3961  
3962  private static com.google.protobuf.Descriptors.Descriptor
3963    internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
3964  private static
3965    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3966      internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable;
3967  private static com.google.protobuf.Descriptors.Descriptor
3968    internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
3969  private static
3970    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3971      internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable;
3972  private static com.google.protobuf.Descriptors.Descriptor
3973    internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
3974  private static
3975    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3976      internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable;
3977  private static com.google.protobuf.Descriptors.Descriptor
3978    internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
3979  private static
3980    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3981      internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable;
3982  private static com.google.protobuf.Descriptors.Descriptor
3983    internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
3984  private static
3985    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3986      internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable;
3987  private static com.google.protobuf.Descriptors.Descriptor
3988    internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
3989  private static
3990    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3991      internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable;
3992  private static com.google.protobuf.Descriptors.Descriptor
3993    internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
3994  private static
3995    com.google.protobuf.GeneratedMessage.FieldAccessorTable
3996      internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable;
3997  private static com.google.protobuf.Descriptors.Descriptor
3998    internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
3999  private static
4000    com.google.protobuf.GeneratedMessage.FieldAccessorTable
4001      internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable;
4002  private static com.google.protobuf.Descriptors.Descriptor
4003    internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
4004  private static
4005    com.google.protobuf.GeneratedMessage.FieldAccessorTable
4006      internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable;
4007  
4008  public static com.google.protobuf.Descriptors.FileDescriptor
4009      getDescriptor() {
4010    return descriptor;
4011  }
4012  private static com.google.protobuf.Descriptors.FileDescriptor
4013      descriptor;
4014  static {
4015    java.lang.String[] descriptorData = {
4016      "\n\027HAServiceProtocol.proto\022\rhadoop.common" +
4017      "\"R\n\035HAStateChangeRequestInfoProto\0221\n\treq" +
4018      "Source\030\001 \002(\0162\036.hadoop.common.HARequestSo" +
4019      "urce\"\033\n\031MonitorHealthRequestProto\"\034\n\032Mon" +
4020      "itorHealthResponseProto\"_\n\036TransitionToA" +
4021      "ctiveRequestProto\022=\n\007reqInfo\030\001 \002(\0132,.had" +
4022      "oop.common.HAStateChangeRequestInfoProto" +
4023      "\"!\n\037TransitionToActiveResponseProto\"`\n\037T" +
4024      "ransitionToStandbyRequestProto\022=\n\007reqInf" +
4025      "o\030\001 \002(\0132,.hadoop.common.HAStateChangeReq",
4026      "uestInfoProto\"\"\n TransitionToStandbyResp" +
4027      "onseProto\"\036\n\034GetServiceStatusRequestProt" +
4028      "o\"\207\001\n\035GetServiceStatusResponseProto\0221\n\005s" +
4029      "tate\030\001 \002(\0162\".hadoop.common.HAServiceStat" +
4030      "eProto\022\033\n\023readyToBecomeActive\030\002 \001(\010\022\026\n\016n" +
4031      "otReadyReason\030\003 \001(\t*@\n\023HAServiceStatePro" +
4032      "to\022\020\n\014INITIALIZING\020\000\022\n\n\006ACTIVE\020\001\022\013\n\007STAN" +
4033      "DBY\020\002*W\n\017HARequestSource\022\023\n\017REQUEST_BY_U" +
4034      "SER\020\000\022\032\n\026REQUEST_BY_USER_FORCED\020\001\022\023\n\017REQ" +
4035      "UEST_BY_ZKFC\020\0022\334\003\n\030HAServiceProtocolServ",
4036      "ice\022d\n\rmonitorHealth\022(.hadoop.common.Mon" +
4037      "itorHealthRequestProto\032).hadoop.common.M" +
4038      "onitorHealthResponseProto\022s\n\022transitionT" +
4039      "oActive\022-.hadoop.common.TransitionToActi" +
4040      "veRequestProto\032..hadoop.common.Transitio" +
4041      "nToActiveResponseProto\022v\n\023transitionToSt" +
4042      "andby\022..hadoop.common.TransitionToStandb" +
4043      "yRequestProto\032/.hadoop.common.Transition" +
4044      "ToStandbyResponseProto\022m\n\020getServiceStat" +
4045      "us\022+.hadoop.common.GetServiceStatusReque",
4046      "stProto\032,.hadoop.common.GetServiceStatus" +
4047      "ResponseProtoB;\n\032org.apache.hadoop.ha.pr" +
4048      "otoB\027HAServiceProtocolProtos\210\001\001\240\001\001"
4049    };
4050    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
4051      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
4052        public com.google.protobuf.ExtensionRegistry assignDescriptors(
4053            com.google.protobuf.Descriptors.FileDescriptor root) {
4054          descriptor = root;
4055          internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor =
4056            getDescriptor().getMessageTypes().get(0);
4057          internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable = new
4058            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4059              internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor,
4060              new java.lang.String[] { "ReqSource", },
4061              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.class,
4062              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder.class);
4063          internal_static_hadoop_common_MonitorHealthRequestProto_descriptor =
4064            getDescriptor().getMessageTypes().get(1);
4065          internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable = new
4066            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4067              internal_static_hadoop_common_MonitorHealthRequestProto_descriptor,
4068              new java.lang.String[] { },
4069              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.class,
4070              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.Builder.class);
4071          internal_static_hadoop_common_MonitorHealthResponseProto_descriptor =
4072            getDescriptor().getMessageTypes().get(2);
4073          internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable = new
4074            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4075              internal_static_hadoop_common_MonitorHealthResponseProto_descriptor,
4076              new java.lang.String[] { },
4077              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
4078              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.Builder.class);
4079          internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor =
4080            getDescriptor().getMessageTypes().get(3);
4081          internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable = new
4082            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4083              internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor,
4084              new java.lang.String[] { "ReqInfo", },
4085              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.class,
4086              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.Builder.class);
4087          internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor =
4088            getDescriptor().getMessageTypes().get(4);
4089          internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable = new
4090            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4091              internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor,
4092              new java.lang.String[] { },
4093              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
4094              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.Builder.class);
4095          internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor =
4096            getDescriptor().getMessageTypes().get(5);
4097          internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable = new
4098            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4099              internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor,
4100              new java.lang.String[] { "ReqInfo", },
4101              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.class,
4102              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.Builder.class);
4103          internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor =
4104            getDescriptor().getMessageTypes().get(6);
4105          internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable = new
4106            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4107              internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor,
4108              new java.lang.String[] { },
4109              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
4110              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.Builder.class);
4111          internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor =
4112            getDescriptor().getMessageTypes().get(7);
4113          internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable = new
4114            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4115              internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor,
4116              new java.lang.String[] { },
4117              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.class,
4118              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.Builder.class);
4119          internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor =
4120            getDescriptor().getMessageTypes().get(8);
4121          internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable = new
4122            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
4123              internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor,
4124              new java.lang.String[] { "State", "ReadyToBecomeActive", "NotReadyReason", },
4125              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
4126              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.Builder.class);
4127          return null;
4128        }
4129      };
4130    com.google.protobuf.Descriptors.FileDescriptor
4131      .internalBuildGeneratedFileFrom(descriptorData,
4132        new com.google.protobuf.Descriptors.FileDescriptor[] {
4133        }, assigner);
4134  }
4135  
4136  // @@protoc_insertion_point(outer_class_scope)
4137}