001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: HAServiceProtocol.proto
003
004package org.apache.hadoop.ha.proto;
005
006public final class HAServiceProtocolProtos {
  // Private constructor: this outer class is a namespace-only holder for the
  // generated types and is never instantiated.
  private HAServiceProtocolProtos() {}
  /**
   * Registers all proto extensions declared by this file with the given
   * registry. {@code HAServiceProtocol.proto} declares no extensions, so the
   * body is intentionally empty; the method exists to satisfy the generated
   * code contract.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
011  /**
012   * Protobuf enum {@code hadoop.common.HAServiceStateProto}
013   */
014  public enum HAServiceStateProto
015      implements com.google.protobuf.ProtocolMessageEnum {
016    /**
017     * <code>INITIALIZING = 0;</code>
018     */
019    INITIALIZING(0, 0),
020    /**
021     * <code>ACTIVE = 1;</code>
022     */
023    ACTIVE(1, 1),
024    /**
025     * <code>STANDBY = 2;</code>
026     */
027    STANDBY(2, 2),
028    ;
029
030    /**
031     * <code>INITIALIZING = 0;</code>
032     */
033    public static final int INITIALIZING_VALUE = 0;
034    /**
035     * <code>ACTIVE = 1;</code>
036     */
037    public static final int ACTIVE_VALUE = 1;
038    /**
039     * <code>STANDBY = 2;</code>
040     */
041    public static final int STANDBY_VALUE = 2;
042
043
044    public final int getNumber() { return value; }
045
046    public static HAServiceStateProto valueOf(int value) {
047      switch (value) {
048        case 0: return INITIALIZING;
049        case 1: return ACTIVE;
050        case 2: return STANDBY;
051        default: return null;
052      }
053    }
054
055    public static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
056        internalGetValueMap() {
057      return internalValueMap;
058    }
059    private static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
060        internalValueMap =
061          new com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>() {
062            public HAServiceStateProto findValueByNumber(int number) {
063              return HAServiceStateProto.valueOf(number);
064            }
065          };
066
067    public final com.google.protobuf.Descriptors.EnumValueDescriptor
068        getValueDescriptor() {
069      return getDescriptor().getValues().get(index);
070    }
071    public final com.google.protobuf.Descriptors.EnumDescriptor
072        getDescriptorForType() {
073      return getDescriptor();
074    }
075    public static final com.google.protobuf.Descriptors.EnumDescriptor
076        getDescriptor() {
077      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(0);
078    }
079
080    private static final HAServiceStateProto[] VALUES = values();
081
082    public static HAServiceStateProto valueOf(
083        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
084      if (desc.getType() != getDescriptor()) {
085        throw new java.lang.IllegalArgumentException(
086          "EnumValueDescriptor is not for this type.");
087      }
088      return VALUES[desc.getIndex()];
089    }
090
091    private final int index;
092    private final int value;
093
094    private HAServiceStateProto(int index, int value) {
095      this.index = index;
096      this.value = value;
097    }
098
099    // @@protoc_insertion_point(enum_scope:hadoop.common.HAServiceStateProto)
100  }
101
102  /**
103   * Protobuf enum {@code hadoop.common.HARequestSource}
104   */
105  public enum HARequestSource
106      implements com.google.protobuf.ProtocolMessageEnum {
107    /**
108     * <code>REQUEST_BY_USER = 0;</code>
109     */
110    REQUEST_BY_USER(0, 0),
111    /**
112     * <code>REQUEST_BY_USER_FORCED = 1;</code>
113     */
114    REQUEST_BY_USER_FORCED(1, 1),
115    /**
116     * <code>REQUEST_BY_ZKFC = 2;</code>
117     */
118    REQUEST_BY_ZKFC(2, 2),
119    ;
120
121    /**
122     * <code>REQUEST_BY_USER = 0;</code>
123     */
124    public static final int REQUEST_BY_USER_VALUE = 0;
125    /**
126     * <code>REQUEST_BY_USER_FORCED = 1;</code>
127     */
128    public static final int REQUEST_BY_USER_FORCED_VALUE = 1;
129    /**
130     * <code>REQUEST_BY_ZKFC = 2;</code>
131     */
132    public static final int REQUEST_BY_ZKFC_VALUE = 2;
133
134
135    public final int getNumber() { return value; }
136
137    public static HARequestSource valueOf(int value) {
138      switch (value) {
139        case 0: return REQUEST_BY_USER;
140        case 1: return REQUEST_BY_USER_FORCED;
141        case 2: return REQUEST_BY_ZKFC;
142        default: return null;
143      }
144    }
145
146    public static com.google.protobuf.Internal.EnumLiteMap<HARequestSource>
147        internalGetValueMap() {
148      return internalValueMap;
149    }
150    private static com.google.protobuf.Internal.EnumLiteMap<HARequestSource>
151        internalValueMap =
152          new com.google.protobuf.Internal.EnumLiteMap<HARequestSource>() {
153            public HARequestSource findValueByNumber(int number) {
154              return HARequestSource.valueOf(number);
155            }
156          };
157
158    public final com.google.protobuf.Descriptors.EnumValueDescriptor
159        getValueDescriptor() {
160      return getDescriptor().getValues().get(index);
161    }
162    public final com.google.protobuf.Descriptors.EnumDescriptor
163        getDescriptorForType() {
164      return getDescriptor();
165    }
166    public static final com.google.protobuf.Descriptors.EnumDescriptor
167        getDescriptor() {
168      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(1);
169    }
170
171    private static final HARequestSource[] VALUES = values();
172
173    public static HARequestSource valueOf(
174        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
175      if (desc.getType() != getDescriptor()) {
176        throw new java.lang.IllegalArgumentException(
177          "EnumValueDescriptor is not for this type.");
178      }
179      return VALUES[desc.getIndex()];
180    }
181
182    private final int index;
183    private final int value;
184
185    private HARequestSource(int index, int value) {
186      this.index = index;
187      this.value = value;
188    }
189
190    // @@protoc_insertion_point(enum_scope:hadoop.common.HARequestSource)
191  }
192
  /**
   * Read-only accessor view shared by {@code HAStateChangeRequestInfoProto}
   * and its {@code Builder}.
   */
  public interface HAStateChangeRequestInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hadoop.common.HARequestSource reqSource = 1;
    /**
     * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
     */
    boolean hasReqSource();
    /**
     * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
     */
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource();
  }
206  /**
207   * Protobuf type {@code hadoop.common.HAStateChangeRequestInfoProto}
208   */
209  public static final class HAStateChangeRequestInfoProto extends
210      com.google.protobuf.GeneratedMessage
211      implements HAStateChangeRequestInfoProtoOrBuilder {
212    // Use HAStateChangeRequestInfoProto.newBuilder() to construct.
213    private HAStateChangeRequestInfoProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
214      super(builder);
215      this.unknownFields = builder.getUnknownFields();
216    }
217    private HAStateChangeRequestInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
218
219    private static final HAStateChangeRequestInfoProto defaultInstance;
220    public static HAStateChangeRequestInfoProto getDefaultInstance() {
221      return defaultInstance;
222    }
223
224    public HAStateChangeRequestInfoProto getDefaultInstanceForType() {
225      return defaultInstance;
226    }
227
228    private final com.google.protobuf.UnknownFieldSet unknownFields;
229    @java.lang.Override
230    public final com.google.protobuf.UnknownFieldSet
231        getUnknownFields() {
232      return this.unknownFields;
233    }
234    private HAStateChangeRequestInfoProto(
235        com.google.protobuf.CodedInputStream input,
236        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
237        throws com.google.protobuf.InvalidProtocolBufferException {
238      initFields();
239      int mutable_bitField0_ = 0;
240      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
241          com.google.protobuf.UnknownFieldSet.newBuilder();
242      try {
243        boolean done = false;
244        while (!done) {
245          int tag = input.readTag();
246          switch (tag) {
247            case 0:
248              done = true;
249              break;
250            default: {
251              if (!parseUnknownField(input, unknownFields,
252                                     extensionRegistry, tag)) {
253                done = true;
254              }
255              break;
256            }
257            case 8: {
258              int rawValue = input.readEnum();
259              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.valueOf(rawValue);
260              if (value == null) {
261                unknownFields.mergeVarintField(1, rawValue);
262              } else {
263                bitField0_ |= 0x00000001;
264                reqSource_ = value;
265              }
266              break;
267            }
268          }
269        }
270      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
271        throw e.setUnfinishedMessage(this);
272      } catch (java.io.IOException e) {
273        throw new com.google.protobuf.InvalidProtocolBufferException(
274            e.getMessage()).setUnfinishedMessage(this);
275      } finally {
276        this.unknownFields = unknownFields.build();
277        makeExtensionsImmutable();
278      }
279    }
280    public static final com.google.protobuf.Descriptors.Descriptor
281        getDescriptor() {
282      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
283    }
284
285    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
286        internalGetFieldAccessorTable() {
287      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable
288          .ensureFieldAccessorsInitialized(
289              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder.class);
290    }
291
292    public static com.google.protobuf.Parser<HAStateChangeRequestInfoProto> PARSER =
293        new com.google.protobuf.AbstractParser<HAStateChangeRequestInfoProto>() {
294      public HAStateChangeRequestInfoProto parsePartialFrom(
295          com.google.protobuf.CodedInputStream input,
296          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
297          throws com.google.protobuf.InvalidProtocolBufferException {
298        return new HAStateChangeRequestInfoProto(input, extensionRegistry);
299      }
300    };
301
302    @java.lang.Override
303    public com.google.protobuf.Parser<HAStateChangeRequestInfoProto> getParserForType() {
304      return PARSER;
305    }
306
307    private int bitField0_;
308    // required .hadoop.common.HARequestSource reqSource = 1;
309    public static final int REQSOURCE_FIELD_NUMBER = 1;
310    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource reqSource_;
311    /**
312     * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
313     */
314    public boolean hasReqSource() {
315      return ((bitField0_ & 0x00000001) == 0x00000001);
316    }
317    /**
318     * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
319     */
320    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource() {
321      return reqSource_;
322    }
323
324    private void initFields() {
325      reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
326    }
327    private byte memoizedIsInitialized = -1;
328    public final boolean isInitialized() {
329      byte isInitialized = memoizedIsInitialized;
330      if (isInitialized != -1) return isInitialized == 1;
331
332      if (!hasReqSource()) {
333        memoizedIsInitialized = 0;
334        return false;
335      }
336      memoizedIsInitialized = 1;
337      return true;
338    }
339
340    public void writeTo(com.google.protobuf.CodedOutputStream output)
341                        throws java.io.IOException {
342      getSerializedSize();
343      if (((bitField0_ & 0x00000001) == 0x00000001)) {
344        output.writeEnum(1, reqSource_.getNumber());
345      }
346      getUnknownFields().writeTo(output);
347    }
348
349    private int memoizedSerializedSize = -1;
350    public int getSerializedSize() {
351      int size = memoizedSerializedSize;
352      if (size != -1) return size;
353
354      size = 0;
355      if (((bitField0_ & 0x00000001) == 0x00000001)) {
356        size += com.google.protobuf.CodedOutputStream
357          .computeEnumSize(1, reqSource_.getNumber());
358      }
359      size += getUnknownFields().getSerializedSize();
360      memoizedSerializedSize = size;
361      return size;
362    }
363
364    private static final long serialVersionUID = 0L;
365    @java.lang.Override
366    protected java.lang.Object writeReplace()
367        throws java.io.ObjectStreamException {
368      return super.writeReplace();
369    }
370
371    @java.lang.Override
372    public boolean equals(final java.lang.Object obj) {
373      if (obj == this) {
374       return true;
375      }
376      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto)) {
377        return super.equals(obj);
378      }
379      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) obj;
380
381      boolean result = true;
382      result = result && (hasReqSource() == other.hasReqSource());
383      if (hasReqSource()) {
384        result = result &&
385            (getReqSource() == other.getReqSource());
386      }
387      result = result &&
388          getUnknownFields().equals(other.getUnknownFields());
389      return result;
390    }
391
392    private int memoizedHashCode = 0;
393    @java.lang.Override
394    public int hashCode() {
395      if (memoizedHashCode != 0) {
396        return memoizedHashCode;
397      }
398      int hash = 41;
399      hash = (19 * hash) + getDescriptorForType().hashCode();
400      if (hasReqSource()) {
401        hash = (37 * hash) + REQSOURCE_FIELD_NUMBER;
402        hash = (53 * hash) + hashEnum(getReqSource());
403      }
404      hash = (29 * hash) + getUnknownFields().hashCode();
405      memoizedHashCode = hash;
406      return hash;
407    }
408
409    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
410        com.google.protobuf.ByteString data)
411        throws com.google.protobuf.InvalidProtocolBufferException {
412      return PARSER.parseFrom(data);
413    }
414    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
415        com.google.protobuf.ByteString data,
416        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
417        throws com.google.protobuf.InvalidProtocolBufferException {
418      return PARSER.parseFrom(data, extensionRegistry);
419    }
420    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(byte[] data)
421        throws com.google.protobuf.InvalidProtocolBufferException {
422      return PARSER.parseFrom(data);
423    }
424    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
425        byte[] data,
426        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
427        throws com.google.protobuf.InvalidProtocolBufferException {
428      return PARSER.parseFrom(data, extensionRegistry);
429    }
430    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(java.io.InputStream input)
431        throws java.io.IOException {
432      return PARSER.parseFrom(input);
433    }
434    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
435        java.io.InputStream input,
436        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
437        throws java.io.IOException {
438      return PARSER.parseFrom(input, extensionRegistry);
439    }
440    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseDelimitedFrom(java.io.InputStream input)
441        throws java.io.IOException {
442      return PARSER.parseDelimitedFrom(input);
443    }
444    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseDelimitedFrom(
445        java.io.InputStream input,
446        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
447        throws java.io.IOException {
448      return PARSER.parseDelimitedFrom(input, extensionRegistry);
449    }
450    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
451        com.google.protobuf.CodedInputStream input)
452        throws java.io.IOException {
453      return PARSER.parseFrom(input);
454    }
455    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parseFrom(
456        com.google.protobuf.CodedInputStream input,
457        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
458        throws java.io.IOException {
459      return PARSER.parseFrom(input, extensionRegistry);
460    }
461
462    public static Builder newBuilder() { return Builder.create(); }
463    public Builder newBuilderForType() { return newBuilder(); }
464    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto prototype) {
465      return newBuilder().mergeFrom(prototype);
466    }
467    public Builder toBuilder() { return newBuilder(this); }
468
469    @java.lang.Override
470    protected Builder newBuilderForType(
471        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
472      Builder builder = new Builder(parent);
473      return builder;
474    }
475    /**
476     * Protobuf type {@code hadoop.common.HAStateChangeRequestInfoProto}
477     */
478    public static final class Builder extends
479        com.google.protobuf.GeneratedMessage.Builder<Builder>
480       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder {
481      public static final com.google.protobuf.Descriptors.Descriptor
482          getDescriptor() {
483        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
484      }
485
486      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
487          internalGetFieldAccessorTable() {
488        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable
489            .ensureFieldAccessorsInitialized(
490                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder.class);
491      }
492
493      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder()
494      private Builder() {
495        maybeForceBuilderInitialization();
496      }
497
498      private Builder(
499          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
500        super(parent);
501        maybeForceBuilderInitialization();
502      }
503      private void maybeForceBuilderInitialization() {
504        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
505        }
506      }
507      private static Builder create() {
508        return new Builder();
509      }
510
511      public Builder clear() {
512        super.clear();
513        reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
514        bitField0_ = (bitField0_ & ~0x00000001);
515        return this;
516      }
517
518      public Builder clone() {
519        return create().mergeFrom(buildPartial());
520      }
521
522      public com.google.protobuf.Descriptors.Descriptor
523          getDescriptorForType() {
524        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
525      }
526
527      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getDefaultInstanceForType() {
528        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
529      }
530
531      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto build() {
532        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = buildPartial();
533        if (!result.isInitialized()) {
534          throw newUninitializedMessageException(result);
535        }
536        return result;
537      }
538
539      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto buildPartial() {
540        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto(this);
541        int from_bitField0_ = bitField0_;
542        int to_bitField0_ = 0;
543        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
544          to_bitField0_ |= 0x00000001;
545        }
546        result.reqSource_ = reqSource_;
547        result.bitField0_ = to_bitField0_;
548        onBuilt();
549        return result;
550      }
551
552      public Builder mergeFrom(com.google.protobuf.Message other) {
553        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) {
554          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto)other);
555        } else {
556          super.mergeFrom(other);
557          return this;
558        }
559      }
560
561      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto other) {
562        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) return this;
563        if (other.hasReqSource()) {
564          setReqSource(other.getReqSource());
565        }
566        this.mergeUnknownFields(other.getUnknownFields());
567        return this;
568      }
569
570      public final boolean isInitialized() {
571        if (!hasReqSource()) {
572          
573          return false;
574        }
575        return true;
576      }
577
578      public Builder mergeFrom(
579          com.google.protobuf.CodedInputStream input,
580          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
581          throws java.io.IOException {
582        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto parsedMessage = null;
583        try {
584          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
585        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
586          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto) e.getUnfinishedMessage();
587          throw e;
588        } finally {
589          if (parsedMessage != null) {
590            mergeFrom(parsedMessage);
591          }
592        }
593        return this;
594      }
595      private int bitField0_;
596
597      // required .hadoop.common.HARequestSource reqSource = 1;
598      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
599      /**
600       * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
601       */
602      public boolean hasReqSource() {
603        return ((bitField0_ & 0x00000001) == 0x00000001);
604      }
605      /**
606       * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
607       */
608      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource getReqSource() {
609        return reqSource_;
610      }
611      /**
612       * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
613       */
614      public Builder setReqSource(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource value) {
615        if (value == null) {
616          throw new NullPointerException();
617        }
618        bitField0_ |= 0x00000001;
619        reqSource_ = value;
620        onChanged();
621        return this;
622      }
623      /**
624       * <code>required .hadoop.common.HARequestSource reqSource = 1;</code>
625       */
626      public Builder clearReqSource() {
627        bitField0_ = (bitField0_ & ~0x00000001);
628        reqSource_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HARequestSource.REQUEST_BY_USER;
629        onChanged();
630        return this;
631      }
632
633      // @@protoc_insertion_point(builder_scope:hadoop.common.HAStateChangeRequestInfoProto)
634    }
635
636    static {
637      defaultInstance = new HAStateChangeRequestInfoProto(true);
638      defaultInstance.initFields();
639    }
640
641    // @@protoc_insertion_point(class_scope:hadoop.common.HAStateChangeRequestInfoProto)
642  }
643
  /**
   * Accessor view for the empty {@code MonitorHealthRequestProto} message;
   * declares no members because the message has no fields.
   */
  public interface MonitorHealthRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
647  /**
648   * Protobuf type {@code hadoop.common.MonitorHealthRequestProto}
649   *
650   * <pre>
651   **
652   * void request
653   * </pre>
654   */
655  public static final class MonitorHealthRequestProto extends
656      com.google.protobuf.GeneratedMessage
657      implements MonitorHealthRequestProtoOrBuilder {
    // Use MonitorHealthRequestProto.newBuilder() to construct.
    private MonitorHealthRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance; blank final
    // defaultInstance below is assigned in the class's static initializer.
    private MonitorHealthRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final MonitorHealthRequestProto defaultInstance;
    public static MonitorHealthRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public MonitorHealthRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Wire fields not covered by this (empty) message's schema, preserved
    // so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: this message defines no fields, so everything read
    // (other than the terminating tag 0) lands in unknownFields.
    private MonitorHealthRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // tag 0 signals end of input
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.Builder.class);
    }

    // Stateless parser instance used by all static parseFrom/parseDelimitedFrom helpers.
    public static com.google.protobuf.Parser<MonitorHealthRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<MonitorHealthRequestProto>() {
      public MonitorHealthRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MonitorHealthRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MonitorHealthRequestProto> getParserForType() {
      return PARSER;
    }
740
    // No fields to reset: this message is intentionally empty (a "void" request).
    private void initFields() {
    }
    // Memoized isInitialized() result: 1 = true, 0 = false, -1 = not yet computed.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so an instance is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
751
    // Serialization: only preserved unknown fields are ever written.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
768
    private static final long serialVersionUID = 0L;
    // Delegates Java serialization to the GeneratedMessage replacement mechanism.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
775
    // Value equality: with no declared fields, only unknown fields are compared.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
791
    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0
    // is simply recomputed each call — harmless, same value).
    private int memoizedHashCode = 0;
    // Hash mixes the descriptor identity and the unknown fields, consistent
    // with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
804
    // Static parse entry points. All overloads delegate to the shared PARSER;
    // the parseDelimitedFrom variants read a varint length prefix first, for
    // streams carrying multiple messages back to back.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
857
    // Builder factories: a fresh builder, a builder pre-populated from a
    // prototype message, and toBuilder() for round-tripping this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Internal hook used by the GeneratedMessage runtime to create a builder
    // attached to a parent (for nested-builder change notification).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
871    /**
872     * Protobuf type {@code hadoop.common.MonitorHealthRequestProto}
873     *
874     * <pre>
875     **
876     * void request
877     * </pre>
878     */
879    public static final class Builder extends
880        com.google.protobuf.GeneratedMessage.Builder<Builder>
881       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProtoOrBuilder {
882      public static final com.google.protobuf.Descriptors.Descriptor
883          getDescriptor() {
884        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
885      }
886
887      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
888          internalGetFieldAccessorTable() {
889        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable
890            .ensureFieldAccessorsInitialized(
891                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.Builder.class);
892      }
893
894      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.newBuilder()
895      private Builder() {
896        maybeForceBuilderInitialization();
897      }
898
899      private Builder(
900          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
901        super(parent);
902        maybeForceBuilderInitialization();
903      }
904      private void maybeForceBuilderInitialization() {
905        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
906        }
907      }
908      private static Builder create() {
909        return new Builder();
910      }
911
912      public Builder clear() {
913        super.clear();
914        return this;
915      }
916
917      public Builder clone() {
918        return create().mergeFrom(buildPartial());
919      }
920
921      public com.google.protobuf.Descriptors.Descriptor
922          getDescriptorForType() {
923        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
924      }
925
926      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto getDefaultInstanceForType() {
927        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
928      }
929
930      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto build() {
931        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
932        if (!result.isInitialized()) {
933          throw newUninitializedMessageException(result);
934        }
935        return result;
936      }
937
938      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildPartial() {
939        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto(this);
940        onBuilt();
941        return result;
942      }
943
944      public Builder mergeFrom(com.google.protobuf.Message other) {
945        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) {
946          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)other);
947        } else {
948          super.mergeFrom(other);
949          return this;
950        }
951      }
952
953      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other) {
954        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance()) return this;
955        this.mergeUnknownFields(other.getUnknownFields());
956        return this;
957      }
958
959      public final boolean isInitialized() {
960        return true;
961      }
962
963      public Builder mergeFrom(
964          com.google.protobuf.CodedInputStream input,
965          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
966          throws java.io.IOException {
967        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parsedMessage = null;
968        try {
969          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
970        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
971          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) e.getUnfinishedMessage();
972          throw e;
973        } finally {
974          if (parsedMessage != null) {
975            mergeFrom(parsedMessage);
976          }
977        }
978        return this;
979      }
980
981      // @@protoc_insertion_point(builder_scope:hadoop.common.MonitorHealthRequestProto)
982    }
983
    // Eagerly creates the shared default (empty) instance at class-load time.
    static {
      defaultInstance = new MonitorHealthRequestProto(true);
      defaultInstance.initFields();
    }
988
989    // @@protoc_insertion_point(class_scope:hadoop.common.MonitorHealthRequestProto)
990  }
991
  // Accessor interface shared by MonitorHealthResponseProto and its Builder.
  // Empty because the message declares no fields.
  public interface MonitorHealthResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
995  /**
996   * Protobuf type {@code hadoop.common.MonitorHealthResponseProto}
997   *
998   * <pre>
999   **
1000   * void response
1001   * </pre>
1002   */
1003  public static final class MonitorHealthResponseProto extends
1004      com.google.protobuf.GeneratedMessage
1005      implements MonitorHealthResponseProtoOrBuilder {
1006    // Use MonitorHealthResponseProto.newBuilder() to construct.
1007    private MonitorHealthResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1008      super(builder);
1009      this.unknownFields = builder.getUnknownFields();
1010    }
1011    private MonitorHealthResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1012
1013    private static final MonitorHealthResponseProto defaultInstance;
1014    public static MonitorHealthResponseProto getDefaultInstance() {
1015      return defaultInstance;
1016    }
1017
1018    public MonitorHealthResponseProto getDefaultInstanceForType() {
1019      return defaultInstance;
1020    }
1021
1022    private final com.google.protobuf.UnknownFieldSet unknownFields;
1023    @java.lang.Override
1024    public final com.google.protobuf.UnknownFieldSet
1025        getUnknownFields() {
1026      return this.unknownFields;
1027    }
1028    private MonitorHealthResponseProto(
1029        com.google.protobuf.CodedInputStream input,
1030        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1031        throws com.google.protobuf.InvalidProtocolBufferException {
1032      initFields();
1033      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1034          com.google.protobuf.UnknownFieldSet.newBuilder();
1035      try {
1036        boolean done = false;
1037        while (!done) {
1038          int tag = input.readTag();
1039          switch (tag) {
1040            case 0:
1041              done = true;
1042              break;
1043            default: {
1044              if (!parseUnknownField(input, unknownFields,
1045                                     extensionRegistry, tag)) {
1046                done = true;
1047              }
1048              break;
1049            }
1050          }
1051        }
1052      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1053        throw e.setUnfinishedMessage(this);
1054      } catch (java.io.IOException e) {
1055        throw new com.google.protobuf.InvalidProtocolBufferException(
1056            e.getMessage()).setUnfinishedMessage(this);
1057      } finally {
1058        this.unknownFields = unknownFields.build();
1059        makeExtensionsImmutable();
1060      }
1061    }
1062    public static final com.google.protobuf.Descriptors.Descriptor
1063        getDescriptor() {
1064      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
1065    }
1066
1067    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1068        internalGetFieldAccessorTable() {
1069      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable
1070          .ensureFieldAccessorsInitialized(
1071              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.Builder.class);
1072    }
1073
1074    public static com.google.protobuf.Parser<MonitorHealthResponseProto> PARSER =
1075        new com.google.protobuf.AbstractParser<MonitorHealthResponseProto>() {
1076      public MonitorHealthResponseProto parsePartialFrom(
1077          com.google.protobuf.CodedInputStream input,
1078          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1079          throws com.google.protobuf.InvalidProtocolBufferException {
1080        return new MonitorHealthResponseProto(input, extensionRegistry);
1081      }
1082    };
1083
1084    @java.lang.Override
1085    public com.google.protobuf.Parser<MonitorHealthResponseProto> getParserForType() {
1086      return PARSER;
1087    }
1088
1089    private void initFields() {
1090    }
1091    private byte memoizedIsInitialized = -1;
1092    public final boolean isInitialized() {
1093      byte isInitialized = memoizedIsInitialized;
1094      if (isInitialized != -1) return isInitialized == 1;
1095
1096      memoizedIsInitialized = 1;
1097      return true;
1098    }
1099
1100    public void writeTo(com.google.protobuf.CodedOutputStream output)
1101                        throws java.io.IOException {
1102      getSerializedSize();
1103      getUnknownFields().writeTo(output);
1104    }
1105
1106    private int memoizedSerializedSize = -1;
1107    public int getSerializedSize() {
1108      int size = memoizedSerializedSize;
1109      if (size != -1) return size;
1110
1111      size = 0;
1112      size += getUnknownFields().getSerializedSize();
1113      memoizedSerializedSize = size;
1114      return size;
1115    }
1116
1117    private static final long serialVersionUID = 0L;
1118    @java.lang.Override
1119    protected java.lang.Object writeReplace()
1120        throws java.io.ObjectStreamException {
1121      return super.writeReplace();
1122    }
1123
1124    @java.lang.Override
1125    public boolean equals(final java.lang.Object obj) {
1126      if (obj == this) {
1127       return true;
1128      }
1129      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)) {
1130        return super.equals(obj);
1131      }
1132      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) obj;
1133
1134      boolean result = true;
1135      result = result &&
1136          getUnknownFields().equals(other.getUnknownFields());
1137      return result;
1138    }
1139
1140    private int memoizedHashCode = 0;
1141    @java.lang.Override
1142    public int hashCode() {
1143      if (memoizedHashCode != 0) {
1144        return memoizedHashCode;
1145      }
1146      int hash = 41;
1147      hash = (19 * hash) + getDescriptorForType().hashCode();
1148      hash = (29 * hash) + getUnknownFields().hashCode();
1149      memoizedHashCode = hash;
1150      return hash;
1151    }
1152
1153    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
1154        com.google.protobuf.ByteString data)
1155        throws com.google.protobuf.InvalidProtocolBufferException {
1156      return PARSER.parseFrom(data);
1157    }
1158    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
1159        com.google.protobuf.ByteString data,
1160        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1161        throws com.google.protobuf.InvalidProtocolBufferException {
1162      return PARSER.parseFrom(data, extensionRegistry);
1163    }
1164    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(byte[] data)
1165        throws com.google.protobuf.InvalidProtocolBufferException {
1166      return PARSER.parseFrom(data);
1167    }
1168    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
1169        byte[] data,
1170        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1171        throws com.google.protobuf.InvalidProtocolBufferException {
1172      return PARSER.parseFrom(data, extensionRegistry);
1173    }
1174    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(java.io.InputStream input)
1175        throws java.io.IOException {
1176      return PARSER.parseFrom(input);
1177    }
1178    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
1179        java.io.InputStream input,
1180        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1181        throws java.io.IOException {
1182      return PARSER.parseFrom(input, extensionRegistry);
1183    }
1184    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(java.io.InputStream input)
1185        throws java.io.IOException {
1186      return PARSER.parseDelimitedFrom(input);
1187    }
1188    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(
1189        java.io.InputStream input,
1190        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1191        throws java.io.IOException {
1192      return PARSER.parseDelimitedFrom(input, extensionRegistry);
1193    }
1194    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
1195        com.google.protobuf.CodedInputStream input)
1196        throws java.io.IOException {
1197      return PARSER.parseFrom(input);
1198    }
1199    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
1200        com.google.protobuf.CodedInputStream input,
1201        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1202        throws java.io.IOException {
1203      return PARSER.parseFrom(input, extensionRegistry);
1204    }
1205
1206    public static Builder newBuilder() { return Builder.create(); }
1207    public Builder newBuilderForType() { return newBuilder(); }
1208    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto prototype) {
1209      return newBuilder().mergeFrom(prototype);
1210    }
1211    public Builder toBuilder() { return newBuilder(this); }
1212
1213    @java.lang.Override
1214    protected Builder newBuilderForType(
1215        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1216      Builder builder = new Builder(parent);
1217      return builder;
1218    }
1219    /**
1220     * Protobuf type {@code hadoop.common.MonitorHealthResponseProto}
1221     *
1222     * <pre>
1223     **
1224     * void response
1225     * </pre>
1226     */
1227    public static final class Builder extends
1228        com.google.protobuf.GeneratedMessage.Builder<Builder>
1229       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProtoOrBuilder {
1230      public static final com.google.protobuf.Descriptors.Descriptor
1231          getDescriptor() {
1232        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
1233      }
1234
1235      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1236          internalGetFieldAccessorTable() {
1237        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable
1238            .ensureFieldAccessorsInitialized(
1239                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.Builder.class);
1240      }
1241
1242      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.newBuilder()
1243      private Builder() {
1244        maybeForceBuilderInitialization();
1245      }
1246
1247      private Builder(
1248          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1249        super(parent);
1250        maybeForceBuilderInitialization();
1251      }
1252      private void maybeForceBuilderInitialization() {
1253        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1254        }
1255      }
1256      private static Builder create() {
1257        return new Builder();
1258      }
1259
1260      public Builder clear() {
1261        super.clear();
1262        return this;
1263      }
1264
1265      public Builder clone() {
1266        return create().mergeFrom(buildPartial());
1267      }
1268
1269      public com.google.protobuf.Descriptors.Descriptor
1270          getDescriptorForType() {
1271        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
1272      }
1273
1274      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto getDefaultInstanceForType() {
1275        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
1276      }
1277
1278      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto build() {
1279        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
1280        if (!result.isInitialized()) {
1281          throw newUninitializedMessageException(result);
1282        }
1283        return result;
1284      }
1285
1286      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildPartial() {
1287        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto(this);
1288        onBuilt();
1289        return result;
1290      }
1291
1292      public Builder mergeFrom(com.google.protobuf.Message other) {
1293        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) {
1294          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)other);
1295        } else {
1296          super.mergeFrom(other);
1297          return this;
1298        }
1299      }
1300
1301      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other) {
1302        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()) return this;
1303        this.mergeUnknownFields(other.getUnknownFields());
1304        return this;
1305      }
1306
1307      public final boolean isInitialized() {
1308        return true;
1309      }
1310
1311      public Builder mergeFrom(
1312          com.google.protobuf.CodedInputStream input,
1313          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1314          throws java.io.IOException {
1315        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parsedMessage = null;
1316        try {
1317          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1318        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1319          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) e.getUnfinishedMessage();
1320          throw e;
1321        } finally {
1322          if (parsedMessage != null) {
1323            mergeFrom(parsedMessage);
1324          }
1325        }
1326        return this;
1327      }
1328
1329      // @@protoc_insertion_point(builder_scope:hadoop.common.MonitorHealthResponseProto)
1330    }
1331
1332    static {
1333      defaultInstance = new MonitorHealthResponseProto(true);
1334      defaultInstance.initFields();
1335    }
1336
1337    // @@protoc_insertion_point(class_scope:hadoop.common.MonitorHealthResponseProto)
1338  }
1339
  // Accessor interface shared by TransitionToActiveRequestProto and its
  // Builder, exposing the single required reqInfo field.
  public interface TransitionToActiveRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    boolean hasReqInfo();
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo();
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder();
  }
1357  /**
1358   * Protobuf type {@code hadoop.common.TransitionToActiveRequestProto}
1359   *
1360   * <pre>
1361   **
1362   * void request
1363   * </pre>
1364   */
1365  public static final class TransitionToActiveRequestProto extends
1366      com.google.protobuf.GeneratedMessage
1367      implements TransitionToActiveRequestProtoOrBuilder {
    // Use TransitionToActiveRequestProto.newBuilder() to construct.
    private TransitionToActiveRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the shared default instance.
    private TransitionToActiveRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1374
    // Shared immutable default instance, created in the class's static
    // initializer (outside this view).
    private static final TransitionToActiveRequestProto defaultInstance;
    public static TransitionToActiveRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public TransitionToActiveRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
1383
    // Fields from the wire that this schema does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads tags until end of input (tag 0). Tag 10 is
    // field 1 (reqInfo) with wire type 2 (length-delimited); everything else
    // goes to the unknown-field set. Note: placing the case after `default`
    // is legal Java and is how protoc emits this switch. The finally block
    // captures whatever was parsed even if an exception is thrown mid-stream.
    private TransitionToActiveRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Unused local; protoc generator artifact — do not hand-edit.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder subBuilder = null;
              // If reqInfo already appeared, merge the repeat into it
              // (last-wins merge semantics for a singular message field).
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = reqInfo_.toBuilder();
              }
              reqInfo_ = input.readMessage(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(reqInfo_);
                reqInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
    }

    // Maps descriptor fields to Java accessors for the reflection API.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.Builder.class);
    }
1449
    // Shared parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<TransitionToActiveRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<TransitionToActiveRequestProto>() {
      public TransitionToActiveRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TransitionToActiveRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TransitionToActiveRequestProto> getParserForType() {
      return PARSER;
    }
1464
    // Presence bitmask: bit 0 tracks whether reqInfo was explicitly set.
    private int bitField0_;
    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
    public static final int REQINFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_;
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    public boolean hasReqInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     *
     * Returns the default instance when unset (see initFields), never null.
     */
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
      return reqInfo_;
    }
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
      return reqInfo_;
    }
1487
    // Seeds reqInfo with its default instance so getReqInfo() is never null.
    private void initFields() {
      reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
    }
    // Tri-state cache: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A message is initialized only if the required reqInfo field is present
    // and itself initialized. The result is memoized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasReqInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getReqInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
1507
    // Serializes this message to the wire format.  getSerializedSize() is
    // called first so memoizedSerializedSize is populated before writing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, reqInfo_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed by getSerializedSize().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, reqInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the superclass implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1538
1539    @java.lang.Override
1540    public boolean equals(final java.lang.Object obj) {
1541      if (obj == this) {
1542       return true;
1543      }
1544      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)) {
1545        return super.equals(obj);
1546      }
1547      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) obj;
1548
1549      boolean result = true;
1550      result = result && (hasReqInfo() == other.hasReqInfo());
1551      if (hasReqInfo()) {
1552        result = result && getReqInfo()
1553            .equals(other.getReqInfo());
1554      }
1555      result = result &&
1556          getUnknownFields().equals(other.getUnknownFields());
1557      return result;
1558    }
1559
    // Memoized hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    // The 41/19/37/53/29 constants are the protoc-generated mixing values;
    // fields are mixed in only when present, consistent with equals().
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReqInfo()) {
        hash = (37 * hash) + REQINFO_FIELD_NUMBER;
        hash = (53 * hash) + getReqInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1576
    // ---------------------------------------------------------------------
    // Static parse entry points; all delegate to the PARSER singleton.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1643    /**
1644     * Protobuf type {@code hadoop.common.TransitionToActiveRequestProto}
1645     *
1646     * <pre>
1647     **
1648     * void request
1649     * </pre>
1650     */
1651    public static final class Builder extends
1652        com.google.protobuf.GeneratedMessage.Builder<Builder>
1653       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProtoOrBuilder {
1654      public static final com.google.protobuf.Descriptors.Descriptor
1655          getDescriptor() {
1656        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
1657      }
1658
1659      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1660          internalGetFieldAccessorTable() {
1661        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable
1662            .ensureFieldAccessorsInitialized(
1663                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.Builder.class);
1664      }
1665
1666      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.newBuilder()
1667      private Builder() {
1668        maybeForceBuilderInitialization();
1669      }
1670
1671      private Builder(
1672          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1673        super(parent);
1674        maybeForceBuilderInitialization();
1675      }
1676      private void maybeForceBuilderInitialization() {
1677        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1678          getReqInfoFieldBuilder();
1679        }
1680      }
1681      private static Builder create() {
1682        return new Builder();
1683      }
1684
1685      public Builder clear() {
1686        super.clear();
1687        if (reqInfoBuilder_ == null) {
1688          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1689        } else {
1690          reqInfoBuilder_.clear();
1691        }
1692        bitField0_ = (bitField0_ & ~0x00000001);
1693        return this;
1694      }
1695
1696      public Builder clone() {
1697        return create().mergeFrom(buildPartial());
1698      }
1699
1700      public com.google.protobuf.Descriptors.Descriptor
1701          getDescriptorForType() {
1702        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
1703      }
1704
1705      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto getDefaultInstanceForType() {
1706        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
1707      }
1708
1709      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto build() {
1710        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
1711        if (!result.isInitialized()) {
1712          throw newUninitializedMessageException(result);
1713        }
1714        return result;
1715      }
1716
1717      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildPartial() {
1718        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto(this);
1719        int from_bitField0_ = bitField0_;
1720        int to_bitField0_ = 0;
1721        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1722          to_bitField0_ |= 0x00000001;
1723        }
1724        if (reqInfoBuilder_ == null) {
1725          result.reqInfo_ = reqInfo_;
1726        } else {
1727          result.reqInfo_ = reqInfoBuilder_.build();
1728        }
1729        result.bitField0_ = to_bitField0_;
1730        onBuilt();
1731        return result;
1732      }
1733
1734      public Builder mergeFrom(com.google.protobuf.Message other) {
1735        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) {
1736          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)other);
1737        } else {
1738          super.mergeFrom(other);
1739          return this;
1740        }
1741      }
1742
1743      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other) {
1744        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance()) return this;
1745        if (other.hasReqInfo()) {
1746          mergeReqInfo(other.getReqInfo());
1747        }
1748        this.mergeUnknownFields(other.getUnknownFields());
1749        return this;
1750      }
1751
1752      public final boolean isInitialized() {
1753        if (!hasReqInfo()) {
1754          
1755          return false;
1756        }
1757        if (!getReqInfo().isInitialized()) {
1758          
1759          return false;
1760        }
1761        return true;
1762      }
1763
1764      public Builder mergeFrom(
1765          com.google.protobuf.CodedInputStream input,
1766          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1767          throws java.io.IOException {
1768        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parsedMessage = null;
1769        try {
1770          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1771        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1772          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) e.getUnfinishedMessage();
1773          throw e;
1774        } finally {
1775          if (parsedMessage != null) {
1776            mergeFrom(parsedMessage);
1777          }
1778        }
1779        return this;
1780      }
1781      private int bitField0_;
1782
1783      // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
1784      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1785      private com.google.protobuf.SingleFieldBuilder<
1786          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> reqInfoBuilder_;
1787      /**
1788       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1789       */
1790      public boolean hasReqInfo() {
1791        return ((bitField0_ & 0x00000001) == 0x00000001);
1792      }
1793      /**
1794       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1795       */
1796      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
1797        if (reqInfoBuilder_ == null) {
1798          return reqInfo_;
1799        } else {
1800          return reqInfoBuilder_.getMessage();
1801        }
1802      }
1803      /**
1804       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1805       */
1806      public Builder setReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
1807        if (reqInfoBuilder_ == null) {
1808          if (value == null) {
1809            throw new NullPointerException();
1810          }
1811          reqInfo_ = value;
1812          onChanged();
1813        } else {
1814          reqInfoBuilder_.setMessage(value);
1815        }
1816        bitField0_ |= 0x00000001;
1817        return this;
1818      }
1819      /**
1820       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1821       */
1822      public Builder setReqInfo(
1823          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder builderForValue) {
1824        if (reqInfoBuilder_ == null) {
1825          reqInfo_ = builderForValue.build();
1826          onChanged();
1827        } else {
1828          reqInfoBuilder_.setMessage(builderForValue.build());
1829        }
1830        bitField0_ |= 0x00000001;
1831        return this;
1832      }
1833      /**
1834       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1835       */
1836      public Builder mergeReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
1837        if (reqInfoBuilder_ == null) {
1838          if (((bitField0_ & 0x00000001) == 0x00000001) &&
1839              reqInfo_ != org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) {
1840            reqInfo_ =
1841              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder(reqInfo_).mergeFrom(value).buildPartial();
1842          } else {
1843            reqInfo_ = value;
1844          }
1845          onChanged();
1846        } else {
1847          reqInfoBuilder_.mergeFrom(value);
1848        }
1849        bitField0_ |= 0x00000001;
1850        return this;
1851      }
1852      /**
1853       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1854       */
1855      public Builder clearReqInfo() {
1856        if (reqInfoBuilder_ == null) {
1857          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
1858          onChanged();
1859        } else {
1860          reqInfoBuilder_.clear();
1861        }
1862        bitField0_ = (bitField0_ & ~0x00000001);
1863        return this;
1864      }
1865      /**
1866       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1867       */
1868      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder getReqInfoBuilder() {
1869        bitField0_ |= 0x00000001;
1870        onChanged();
1871        return getReqInfoFieldBuilder().getBuilder();
1872      }
1873      /**
1874       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1875       */
1876      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
1877        if (reqInfoBuilder_ != null) {
1878          return reqInfoBuilder_.getMessageOrBuilder();
1879        } else {
1880          return reqInfo_;
1881        }
1882      }
1883      /**
1884       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
1885       */
1886      private com.google.protobuf.SingleFieldBuilder<
1887          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> 
1888          getReqInfoFieldBuilder() {
1889        if (reqInfoBuilder_ == null) {
1890          reqInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
1891              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder>(
1892                  reqInfo_,
1893                  getParentForChildren(),
1894                  isClean());
1895          reqInfo_ = null;
1896        }
1897        return reqInfoBuilder_;
1898      }
1899
1900      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToActiveRequestProto)
1901    }
1902
    // Creates the singleton default instance with the no-parse constructor,
    // then resets its fields to their declared defaults.
    static {
      defaultInstance = new TransitionToActiveRequestProto(true);
      defaultInstance.initFields();
    }
1907
1908    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToActiveRequestProto)
1909  }
1910
  // Read-only view interface for TransitionToActiveResponseProto; the message
  // declares no fields, so no accessors are generated beyond the base interface.
  public interface TransitionToActiveResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
1914  /**
1915   * Protobuf type {@code hadoop.common.TransitionToActiveResponseProto}
1916   *
1917   * <pre>
1918   **
1919   * void response
1920   * </pre>
1921   */
1922  public static final class TransitionToActiveResponseProto extends
1923      com.google.protobuf.GeneratedMessage
1924      implements TransitionToActiveResponseProtoOrBuilder {
    // Use TransitionToActiveResponseProto.newBuilder() to construct.
    private TransitionToActiveResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // "noInit" constructor used only for the singleton default instance.
    private TransitionToActiveResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TransitionToActiveResponseProto defaultInstance;
    public static TransitionToActiveResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public TransitionToActiveResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unknown fields captured during parsing so round-tripping is lossless.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads tags until EOF (tag 0).  Since this message
    // declares no fields, everything read is preserved as unknown fields.
    private TransitionToActiveResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially built message so callers can recover it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.Builder.class);
    }

    // Singleton parser delegating to the stream-based parsing constructor.
    public static com.google.protobuf.Parser<TransitionToActiveResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<TransitionToActiveResponseProto>() {
      public TransitionToActiveResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TransitionToActiveResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TransitionToActiveResponseProto> getParserForType() {
      return PARSER;
    }

    // No fields to reset for this empty message.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    // An empty message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
2018
    // Serialization: only unknown fields are ever written for this empty message.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed by getSerializedSize().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the superclass implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2042
2043    @java.lang.Override
2044    public boolean equals(final java.lang.Object obj) {
2045      if (obj == this) {
2046       return true;
2047      }
2048      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)) {
2049        return super.equals(obj);
2050      }
2051      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) obj;
2052
2053      boolean result = true;
2054      result = result &&
2055          getUnknownFields().equals(other.getUnknownFields());
2056      return result;
2057    }
2058
    // Memoized hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    // Mixes only the descriptor and unknown fields (no declared fields),
    // using the protoc-generated 41/19/29 constants.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2071
    // ---------------------------------------------------------------------
    // Static parse entry points; all delegate to the PARSER singleton.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2138    /**
2139     * Protobuf type {@code hadoop.common.TransitionToActiveResponseProto}
2140     *
2141     * <pre>
2142     **
2143     * void response
2144     * </pre>
2145     */
2146    public static final class Builder extends
2147        com.google.protobuf.GeneratedMessage.Builder<Builder>
2148       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProtoOrBuilder {
2149      public static final com.google.protobuf.Descriptors.Descriptor
2150          getDescriptor() {
2151        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
2152      }
2153
2154      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2155          internalGetFieldAccessorTable() {
2156        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable
2157            .ensureFieldAccessorsInitialized(
2158                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.Builder.class);
2159      }
2160
2161      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.newBuilder()
2162      private Builder() {
2163        maybeForceBuilderInitialization();
2164      }
2165
2166      private Builder(
2167          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2168        super(parent);
2169        maybeForceBuilderInitialization();
2170      }
2171      private void maybeForceBuilderInitialization() {
2172        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2173        }
2174      }
2175      private static Builder create() {
2176        return new Builder();
2177      }
2178
2179      public Builder clear() {
2180        super.clear();
2181        return this;
2182      }
2183
2184      public Builder clone() {
2185        return create().mergeFrom(buildPartial());
2186      }
2187
2188      public com.google.protobuf.Descriptors.Descriptor
2189          getDescriptorForType() {
2190        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
2191      }
2192
2193      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto getDefaultInstanceForType() {
2194        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
2195      }
2196
      // Builds the message, throwing if it is uninitialized. This message has
      // no required fields (isInitialized() always returns true), so the
      // throw branch is effectively unreachable.
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
2204
      // Builds without initialization checks; no declared fields to copy.
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto(this);
        onBuilt();
        return result;
      }
2210
      // Dispatches to the typed overload when possible; otherwise falls back
      // to the reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
2219
      // With no declared fields, merging another instance only carries over
      // its unknown fields. Merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
2225
      // Always initialized: the message declares no required fields.
      public final boolean isInitialized() {
        return true;
      }
2229
      // Parses a message from the stream and merges it into this builder.
      // On parse failure, any partially parsed message attached to the
      // exception is still merged in (via the finally block) before rethrow.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
2247
2248      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToActiveResponseProto)
2249    }
2250
    // Creates and initializes the shared default instance at class-load time.
    static {
      defaultInstance = new TransitionToActiveResponseProto(true);
      defaultInstance.initFields();
    }
2255
2256    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToActiveResponseProto)
2257  }
2258
  /**
   * Read accessors for {@code hadoop.common.TransitionToStandbyRequestProto},
   * implemented by both the message class and its builder.
   */
  public interface TransitionToStandbyRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    boolean hasReqInfo();
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo();
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder();
  }
2276  /**
2277   * Protobuf type {@code hadoop.common.TransitionToStandbyRequestProto}
2278   *
2279   * <pre>
2280   **
2281   * void request
2282   * </pre>
2283   */
  public static final class TransitionToStandbyRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToStandbyRequestProtoOrBuilder {
    // Use TransitionToStandbyRequestProto.newBuilder() to construct.
    private TransitionToStandbyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-arg variant used only to create the shared default instance
    // (see the static initializer at the bottom of this class).
    private TransitionToStandbyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance; assigned in the static block below.
    private static final TransitionToStandbyRequestProto defaultInstance;
    public static TransitionToStandbyRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public TransitionToStandbyRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that are not declared in the schema; preserved
    // so they survive a parse/serialize round trip.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor; invoked via PARSER.parsePartialFrom.
    private TransitionToStandbyRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            // NOTE: default appearing before case 10 is legal Java; exact case
            // labels are matched first regardless of order.
            default: {
              // Unrecognized field: keep it in unknownFields; a false return
              // signals end of group/stream, so stop.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1 (reqInfo), wire type 2 (length-delimited).
              // If reqInfo was already seen, merge the new occurrence into the
              // existing one instead of replacing it.
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = reqInfo_.toBuilder();
              }
              reqInfo_ = input.readMessage(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(reqInfo_);
                reqInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error, so the partial
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<TransitionToStandbyRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<TransitionToStandbyRequestProto>() {
      public TransitionToStandbyRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TransitionToStandbyRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TransitionToStandbyRequestProto> getParserForType() {
      return PARSER;
    }

    // Presence bits for optional/required fields; bit 0 tracks reqInfo.
    private int bitField0_;
    // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
    public static final int REQINFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_;
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    public boolean hasReqInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
      return reqInfo_;
    }
    /**
     * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
     */
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
      return reqInfo_;
    }

    // Resets reqInfo to its default instance; called before parsing.
    private void initFields() {
      reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
    }
    // Memoized initialization state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The required reqInfo field must be present and itself initialized.
      if (!hasReqInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getReqInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its memoization side effect before serializing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, reqInfo_);
      }
      getUnknownFields().writeTo(output);
    }

    // Serialized-size cache; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, reqInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over the reqInfo field and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) obj;

      boolean result = true;
      result = result && (hasReqInfo() == other.hasReqInfo());
      if (hasReqInfo()) {
        result = result && getReqInfo()
            .equals(other.getReqInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash cache; 0 is the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReqInfo()) {
        hash = (37 * hash) + REQINFO_FIELD_NUMBER;
        hash = (53 * hash) + getReqInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.TransitionToStandbyRequestProto}
     *
     * <pre>
     **
     * void request
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-aware constructor used when this builder is nested in another.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the reqInfo field builder when the library's
      // alwaysUseFieldBuilders flag is set.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getReqInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets reqInfo (value or nested builder) and clears its presence bit.
      public Builder clear() {
        super.clear();
        if (reqInfoBuilder_ == null) {
          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
        } else {
          reqInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
      }

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without requiring the
      // required field to be set; presence bits are transferred as-is.
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (reqInfoBuilder_ == null) {
          result.reqInfo_ = reqInfo_;
        } else {
          result.reqInfo_ = reqInfoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance()) return this;
        if (other.hasReqInfo()) {
          mergeReqInfo(other.getReqInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unmemoized check: required reqInfo must be set and itself initialized.
      public final boolean isInitialized() {
        if (!hasReqInfo()) {
          
          return false;
        }
        if (!getReqInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;
      // Either reqInfo_ (plain value) or reqInfoBuilder_ (nested builder)
      // holds the field; once the builder exists it takes ownership.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> reqInfoBuilder_;
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public boolean hasReqInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto getReqInfo() {
        if (reqInfoBuilder_ == null) {
          return reqInfo_;
        } else {
          return reqInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public Builder setReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
        if (reqInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqInfo_ = value;
          onChanged();
        } else {
          reqInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public Builder setReqInfo(
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder builderForValue) {
        if (reqInfoBuilder_ == null) {
          reqInfo_ = builderForValue.build();
          onChanged();
        } else {
          reqInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public Builder mergeReqInfo(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto value) {
        if (reqInfoBuilder_ == null) {
          // Merge field-by-field only when a non-default value is already
          // present; otherwise simply adopt the incoming value.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              reqInfo_ != org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance()) {
            reqInfo_ =
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.newBuilder(reqInfo_).mergeFrom(value).buildPartial();
          } else {
            reqInfo_ = value;
          }
          onChanged();
        } else {
          reqInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public Builder clearReqInfo() {
        if (reqInfoBuilder_ == null) {
          reqInfo_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.getDefaultInstance();
          onChanged();
        } else {
          reqInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder getReqInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReqInfoFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder getReqInfoOrBuilder() {
        if (reqInfoBuilder_ != null) {
          return reqInfoBuilder_.getMessageOrBuilder();
        } else {
          return reqInfo_;
        }
      }
      /**
       * <code>required .hadoop.common.HAStateChangeRequestInfoProto reqInfo = 1;</code>
       */
      // Lazily creates the single-field builder; ownership of the current
      // value transfers to it, so reqInfo_ is nulled afterwards.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder> 
          getReqInfoFieldBuilder() {
        if (reqInfoBuilder_ == null) {
          reqInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProto.Builder, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAStateChangeRequestInfoProtoOrBuilder>(
                  reqInfo_,
                  getParentForChildren(),
                  isClean());
          reqInfo_ = null;
        }
        return reqInfoBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToStandbyRequestProto)
    }

    // Creates and initializes the shared default instance at class-load time.
    static {
      defaultInstance = new TransitionToStandbyRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToStandbyRequestProto)
  }
2829
  /**
   * Read accessors for {@code hadoop.common.TransitionToStandbyResponseProto}.
   * The message declares no fields, so only the base accessors apply.
   */
  public interface TransitionToStandbyResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
2833  /**
2834   * Protobuf type {@code hadoop.common.TransitionToStandbyResponseProto}
2835   *
2836   * <pre>
2837   **
2838   * void response
2839   * </pre>
2840   */
2841  public static final class TransitionToStandbyResponseProto extends
2842      com.google.protobuf.GeneratedMessage
2843      implements TransitionToStandbyResponseProtoOrBuilder {
    // Use TransitionToStandbyResponseProto.newBuilder() to construct.
    private TransitionToStandbyResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-arg variant; presumably used only to create the shared default
    // instance, as in the sibling message classes in this file.
    private TransitionToStandbyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2850
    // Shared immutable default instance and its accessors.
    private static final TransitionToStandbyResponseProto defaultInstance;
    public static TransitionToStandbyResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public TransitionToStandbyResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
2859
    // Fields seen on the wire that are not declared in the schema; preserved
    // so they survive a parse/serialize round trip.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor; invoked via PARSER.parsePartialFrom.
    // This message has no declared fields, so every non-zero tag is routed
    // into unknownFields.
    private TransitionToStandbyResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // Preserve unrecognized fields; a false return signals end of
              // group/stream, so stop.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error, so the partial
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor and reflective accessor-table bindings for
    // hadoop.common.TransitionToStandbyResponseProto.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.Builder.class);
    }
2911
    // Stateless parser singleton; each parse call delegates to the
    // stream-parsing constructor above.
    public static com.google.protobuf.Parser<TransitionToStandbyResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<TransitionToStandbyResponseProto>() {
      public TransitionToStandbyResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TransitionToStandbyResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TransitionToStandbyResponseProto> getParserForType() {
      return PARSER;
    }
2926
    // No fields to initialize for this empty message.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not yet computed, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields declared, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
2937
    // Serializes this message; with no declared fields, only retained unknown
    // fields are written.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size: -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2954
    private static final long serialVersionUID = 0L;
    // Java serialization is redirected through the protobuf-aware replacement
    // object supplied by GeneratedMessage.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2961
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) obj;

      // No declared fields: equality depends only on the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code: 0 until first computed (consistent with equals above).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2990
    // ---- Static parse helpers; all delegate to the PARSER singleton. ----
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
3043
    // ---- Builder factory methods. ----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated from the given prototype message.
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
3057    /**
3058     * Protobuf type {@code hadoop.common.TransitionToStandbyResponseProto}
3059     *
3060     * <pre>
3061     **
3062     * void response
3063     * </pre>
3064     */
3065    public static final class Builder extends
3066        com.google.protobuf.GeneratedMessage.Builder<Builder>
3067       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProtoOrBuilder {
3068      public static final com.google.protobuf.Descriptors.Descriptor
3069          getDescriptor() {
3070        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
3071      }
3072
3073      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3074          internalGetFieldAccessorTable() {
3075        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable
3076            .ensureFieldAccessorsInitialized(
3077                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.Builder.class);
3078      }
3079
3080      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.newBuilder()
3081      private Builder() {
3082        maybeForceBuilderInitialization();
3083      }
3084
3085      private Builder(
3086          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3087        super(parent);
3088        maybeForceBuilderInitialization();
3089      }
3090      private void maybeForceBuilderInitialization() {
3091        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3092        }
3093      }
3094      private static Builder create() {
3095        return new Builder();
3096      }
3097
3098      public Builder clear() {
3099        super.clear();
3100        return this;
3101      }
3102
3103      public Builder clone() {
3104        return create().mergeFrom(buildPartial());
3105      }
3106
3107      public com.google.protobuf.Descriptors.Descriptor
3108          getDescriptorForType() {
3109        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
3110      }
3111
3112      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto getDefaultInstanceForType() {
3113        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
3114      }
3115
3116      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto build() {
3117        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
3118        if (!result.isInitialized()) {
3119          throw newUninitializedMessageException(result);
3120        }
3121        return result;
3122      }
3123
3124      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildPartial() {
3125        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto(this);
3126        onBuilt();
3127        return result;
3128      }
3129
3130      public Builder mergeFrom(com.google.protobuf.Message other) {
3131        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) {
3132          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)other);
3133        } else {
3134          super.mergeFrom(other);
3135          return this;
3136        }
3137      }
3138
3139      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other) {
3140        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()) return this;
3141        this.mergeUnknownFields(other.getUnknownFields());
3142        return this;
3143      }
3144
3145      public final boolean isInitialized() {
3146        return true;
3147      }
3148
3149      public Builder mergeFrom(
3150          com.google.protobuf.CodedInputStream input,
3151          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3152          throws java.io.IOException {
3153        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parsedMessage = null;
3154        try {
3155          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3156        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3157          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) e.getUnfinishedMessage();
3158          throw e;
3159        } finally {
3160          if (parsedMessage != null) {
3161            mergeFrom(parsedMessage);
3162          }
3163        }
3164        return this;
3165      }
3166
3167      // @@protoc_insertion_point(builder_scope:hadoop.common.TransitionToStandbyResponseProto)
3168    }
3169
    // Eagerly create the shared immutable default instance.
    static {
      defaultInstance = new TransitionToStandbyResponseProto(true);
      defaultInstance.initFields();
    }
3174
3175    // @@protoc_insertion_point(class_scope:hadoop.common.TransitionToStandbyResponseProto)
3176  }
3177
  /**
   * Read-only accessor interface for {@code hadoop.common.GetServiceStatusRequestProto},
   * implemented by both the message and its builder. The request declares no
   * fields, so no accessors are generated.
   */
  public interface GetServiceStatusRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
3181  /**
3182   * Protobuf type {@code hadoop.common.GetServiceStatusRequestProto}
3183   *
3184   * <pre>
3185   **
3186   * void request
3187   * </pre>
3188   */
3189  public static final class GetServiceStatusRequestProto extends
3190      com.google.protobuf.GeneratedMessage
3191      implements GetServiceStatusRequestProtoOrBuilder {
3192    // Use GetServiceStatusRequestProto.newBuilder() to construct.
3193    private GetServiceStatusRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3194      super(builder);
3195      this.unknownFields = builder.getUnknownFields();
3196    }
3197    private GetServiceStatusRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3198
3199    private static final GetServiceStatusRequestProto defaultInstance;
3200    public static GetServiceStatusRequestProto getDefaultInstance() {
3201      return defaultInstance;
3202    }
3203
3204    public GetServiceStatusRequestProto getDefaultInstanceForType() {
3205      return defaultInstance;
3206    }
3207
3208    private final com.google.protobuf.UnknownFieldSet unknownFields;
3209    @java.lang.Override
3210    public final com.google.protobuf.UnknownFieldSet
3211        getUnknownFields() {
3212      return this.unknownFields;
3213    }
3214    private GetServiceStatusRequestProto(
3215        com.google.protobuf.CodedInputStream input,
3216        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3217        throws com.google.protobuf.InvalidProtocolBufferException {
3218      initFields();
3219      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3220          com.google.protobuf.UnknownFieldSet.newBuilder();
3221      try {
3222        boolean done = false;
3223        while (!done) {
3224          int tag = input.readTag();
3225          switch (tag) {
3226            case 0:
3227              done = true;
3228              break;
3229            default: {
3230              if (!parseUnknownField(input, unknownFields,
3231                                     extensionRegistry, tag)) {
3232                done = true;
3233              }
3234              break;
3235            }
3236          }
3237        }
3238      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3239        throw e.setUnfinishedMessage(this);
3240      } catch (java.io.IOException e) {
3241        throw new com.google.protobuf.InvalidProtocolBufferException(
3242            e.getMessage()).setUnfinishedMessage(this);
3243      } finally {
3244        this.unknownFields = unknownFields.build();
3245        makeExtensionsImmutable();
3246      }
3247    }
3248    public static final com.google.protobuf.Descriptors.Descriptor
3249        getDescriptor() {
3250      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
3251    }
3252
3253    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3254        internalGetFieldAccessorTable() {
3255      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable
3256          .ensureFieldAccessorsInitialized(
3257              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.Builder.class);
3258    }
3259
3260    public static com.google.protobuf.Parser<GetServiceStatusRequestProto> PARSER =
3261        new com.google.protobuf.AbstractParser<GetServiceStatusRequestProto>() {
3262      public GetServiceStatusRequestProto parsePartialFrom(
3263          com.google.protobuf.CodedInputStream input,
3264          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3265          throws com.google.protobuf.InvalidProtocolBufferException {
3266        return new GetServiceStatusRequestProto(input, extensionRegistry);
3267      }
3268    };
3269
3270    @java.lang.Override
3271    public com.google.protobuf.Parser<GetServiceStatusRequestProto> getParserForType() {
3272      return PARSER;
3273    }
3274
3275    private void initFields() {
3276    }
3277    private byte memoizedIsInitialized = -1;
3278    public final boolean isInitialized() {
3279      byte isInitialized = memoizedIsInitialized;
3280      if (isInitialized != -1) return isInitialized == 1;
3281
3282      memoizedIsInitialized = 1;
3283      return true;
3284    }
3285
3286    public void writeTo(com.google.protobuf.CodedOutputStream output)
3287                        throws java.io.IOException {
3288      getSerializedSize();
3289      getUnknownFields().writeTo(output);
3290    }
3291
3292    private int memoizedSerializedSize = -1;
3293    public int getSerializedSize() {
3294      int size = memoizedSerializedSize;
3295      if (size != -1) return size;
3296
3297      size = 0;
3298      size += getUnknownFields().getSerializedSize();
3299      memoizedSerializedSize = size;
3300      return size;
3301    }
3302
3303    private static final long serialVersionUID = 0L;
3304    @java.lang.Override
3305    protected java.lang.Object writeReplace()
3306        throws java.io.ObjectStreamException {
3307      return super.writeReplace();
3308    }
3309
3310    @java.lang.Override
3311    public boolean equals(final java.lang.Object obj) {
3312      if (obj == this) {
3313       return true;
3314      }
3315      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)) {
3316        return super.equals(obj);
3317      }
3318      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) obj;
3319
3320      boolean result = true;
3321      result = result &&
3322          getUnknownFields().equals(other.getUnknownFields());
3323      return result;
3324    }
3325
3326    private int memoizedHashCode = 0;
3327    @java.lang.Override
3328    public int hashCode() {
3329      if (memoizedHashCode != 0) {
3330        return memoizedHashCode;
3331      }
3332      int hash = 41;
3333      hash = (19 * hash) + getDescriptorForType().hashCode();
3334      hash = (29 * hash) + getUnknownFields().hashCode();
3335      memoizedHashCode = hash;
3336      return hash;
3337    }
3338
3339    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
3340        com.google.protobuf.ByteString data)
3341        throws com.google.protobuf.InvalidProtocolBufferException {
3342      return PARSER.parseFrom(data);
3343    }
3344    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
3345        com.google.protobuf.ByteString data,
3346        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3347        throws com.google.protobuf.InvalidProtocolBufferException {
3348      return PARSER.parseFrom(data, extensionRegistry);
3349    }
3350    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(byte[] data)
3351        throws com.google.protobuf.InvalidProtocolBufferException {
3352      return PARSER.parseFrom(data);
3353    }
3354    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
3355        byte[] data,
3356        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3357        throws com.google.protobuf.InvalidProtocolBufferException {
3358      return PARSER.parseFrom(data, extensionRegistry);
3359    }
3360    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(java.io.InputStream input)
3361        throws java.io.IOException {
3362      return PARSER.parseFrom(input);
3363    }
3364    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
3365        java.io.InputStream input,
3366        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3367        throws java.io.IOException {
3368      return PARSER.parseFrom(input, extensionRegistry);
3369    }
3370    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(java.io.InputStream input)
3371        throws java.io.IOException {
3372      return PARSER.parseDelimitedFrom(input);
3373    }
3374    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(
3375        java.io.InputStream input,
3376        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3377        throws java.io.IOException {
3378      return PARSER.parseDelimitedFrom(input, extensionRegistry);
3379    }
3380    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
3381        com.google.protobuf.CodedInputStream input)
3382        throws java.io.IOException {
3383      return PARSER.parseFrom(input);
3384    }
3385    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
3386        com.google.protobuf.CodedInputStream input,
3387        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3388        throws java.io.IOException {
3389      return PARSER.parseFrom(input, extensionRegistry);
3390    }
3391
3392    public static Builder newBuilder() { return Builder.create(); }
3393    public Builder newBuilderForType() { return newBuilder(); }
3394    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto prototype) {
3395      return newBuilder().mergeFrom(prototype);
3396    }
3397    public Builder toBuilder() { return newBuilder(this); }
3398
3399    @java.lang.Override
3400    protected Builder newBuilderForType(
3401        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3402      Builder builder = new Builder(parent);
3403      return builder;
3404    }
3405    /**
3406     * Protobuf type {@code hadoop.common.GetServiceStatusRequestProto}
3407     *
3408     * <pre>
3409     **
3410     * void request
3411     * </pre>
3412     */
3413    public static final class Builder extends
3414        com.google.protobuf.GeneratedMessage.Builder<Builder>
3415       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProtoOrBuilder {
3416      public static final com.google.protobuf.Descriptors.Descriptor
3417          getDescriptor() {
3418        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
3419      }
3420
3421      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3422          internalGetFieldAccessorTable() {
3423        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable
3424            .ensureFieldAccessorsInitialized(
3425                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.Builder.class);
3426      }
3427
3428      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.newBuilder()
3429      private Builder() {
3430        maybeForceBuilderInitialization();
3431      }
3432
3433      private Builder(
3434          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3435        super(parent);
3436        maybeForceBuilderInitialization();
3437      }
3438      private void maybeForceBuilderInitialization() {
3439        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3440        }
3441      }
3442      private static Builder create() {
3443        return new Builder();
3444      }
3445
3446      public Builder clear() {
3447        super.clear();
3448        return this;
3449      }
3450
3451      public Builder clone() {
3452        return create().mergeFrom(buildPartial());
3453      }
3454
3455      public com.google.protobuf.Descriptors.Descriptor
3456          getDescriptorForType() {
3457        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
3458      }
3459
3460      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto getDefaultInstanceForType() {
3461        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
3462      }
3463
3464      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto build() {
3465        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
3466        if (!result.isInitialized()) {
3467          throw newUninitializedMessageException(result);
3468        }
3469        return result;
3470      }
3471
3472      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildPartial() {
3473        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto(this);
3474        onBuilt();
3475        return result;
3476      }
3477
3478      public Builder mergeFrom(com.google.protobuf.Message other) {
3479        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) {
3480          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)other);
3481        } else {
3482          super.mergeFrom(other);
3483          return this;
3484        }
3485      }
3486
3487      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other) {
3488        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance()) return this;
3489        this.mergeUnknownFields(other.getUnknownFields());
3490        return this;
3491      }
3492
3493      public final boolean isInitialized() {
3494        return true;
3495      }
3496
3497      public Builder mergeFrom(
3498          com.google.protobuf.CodedInputStream input,
3499          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3500          throws java.io.IOException {
3501        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parsedMessage = null;
3502        try {
3503          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3504        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3505          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) e.getUnfinishedMessage();
3506          throw e;
3507        } finally {
3508          if (parsedMessage != null) {
3509            mergeFrom(parsedMessage);
3510          }
3511        }
3512        return this;
3513      }
3514
3515      // @@protoc_insertion_point(builder_scope:hadoop.common.GetServiceStatusRequestProto)
3516    }
3517
3518    static {
3519      defaultInstance = new GetServiceStatusRequestProto(true);
3520      defaultInstance.initFields();
3521    }
3522
3523    // @@protoc_insertion_point(class_scope:hadoop.common.GetServiceStatusRequestProto)
3524  }
3525
  /**
   * Read-only accessor interface for {@code hadoop.common.GetServiceStatusResponseProto},
   * implemented by both the message and its builder. Each optional/required
   * field gets a has-presence check plus value accessor(s).
   */
  public interface GetServiceStatusResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hadoop.common.HAServiceStateProto state = 1;
    /**
     * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
     */
    boolean hasState();
    /**
     * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
     */
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState();

    // optional bool readyToBecomeActive = 2;
    /**
     * <code>optional bool readyToBecomeActive = 2;</code>
     *
     * <pre>
     * If state is STANDBY, indicate whether it is
     * ready to become active.
     * </pre>
     */
    boolean hasReadyToBecomeActive();
    /**
     * <code>optional bool readyToBecomeActive = 2;</code>
     *
     * <pre>
     * If state is STANDBY, indicate whether it is
     * ready to become active.
     * </pre>
     */
    boolean getReadyToBecomeActive();

    // optional string notReadyReason = 3;
    /**
     * <code>optional string notReadyReason = 3;</code>
     *
     * <pre>
     * If not ready to become active, a textual explanation of why not
     * </pre>
     */
    boolean hasNotReadyReason();
    /**
     * <code>optional string notReadyReason = 3;</code>
     *
     * <pre>
     * If not ready to become active, a textual explanation of why not
     * </pre>
     */
    java.lang.String getNotReadyReason();
    /**
     * <code>optional string notReadyReason = 3;</code>
     *
     * <pre>
     * If not ready to become active, a textual explanation of why not
     * </pre>
     */
    com.google.protobuf.ByteString
        getNotReadyReasonBytes();
  }
3586  /**
3587   * Protobuf type {@code hadoop.common.GetServiceStatusResponseProto}
3588   *
3589   * <pre>
3590   **
3591   * Returns the state of the service
3592   * </pre>
3593   */
3594  public static final class GetServiceStatusResponseProto extends
3595      com.google.protobuf.GeneratedMessage
3596      implements GetServiceStatusResponseProtoOrBuilder {
3597    // Use GetServiceStatusResponseProto.newBuilder() to construct.
    // Built via Builder.build(); adopts the builder's unknown fields.
    private GetServiceStatusResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the singleton default instance.
    private GetServiceStatusResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; assigned in this class's static initializer.
    private static final GetServiceStatusResponseProto defaultInstance;
    public static GetServiceStatusResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetServiceStatusResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized during parsing are preserved here so they can be
    // round-tripped on re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
3619    private GetServiceStatusResponseProto(
3620        com.google.protobuf.CodedInputStream input,
3621        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3622        throws com.google.protobuf.InvalidProtocolBufferException {
3623      initFields();
3624      int mutable_bitField0_ = 0;
3625      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3626          com.google.protobuf.UnknownFieldSet.newBuilder();
3627      try {
3628        boolean done = false;
3629        while (!done) {
3630          int tag = input.readTag();
3631          switch (tag) {
3632            case 0:
3633              done = true;
3634              break;
3635            default: {
3636              if (!parseUnknownField(input, unknownFields,
3637                                     extensionRegistry, tag)) {
3638                done = true;
3639              }
3640              break;
3641            }
3642            case 8: {
3643              int rawValue = input.readEnum();
3644              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.valueOf(rawValue);
3645              if (value == null) {
3646                unknownFields.mergeVarintField(1, rawValue);
3647              } else {
3648                bitField0_ |= 0x00000001;
3649                state_ = value;
3650              }
3651              break;
3652            }
3653            case 16: {
3654              bitField0_ |= 0x00000002;
3655              readyToBecomeActive_ = input.readBool();
3656              break;
3657            }
3658            case 26: {
3659              bitField0_ |= 0x00000004;
3660              notReadyReason_ = input.readBytes();
3661              break;
3662            }
3663          }
3664        }
3665      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3666        throw e.setUnfinishedMessage(this);
3667      } catch (java.io.IOException e) {
3668        throw new com.google.protobuf.InvalidProtocolBufferException(
3669            e.getMessage()).setUnfinishedMessage(this);
3670      } finally {
3671        this.unknownFields = unknownFields.build();
3672        makeExtensionsImmutable();
3673      }
3674    }
3675    public static final com.google.protobuf.Descriptors.Descriptor
3676        getDescriptor() {
3677      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
3678    }
3679
3680    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3681        internalGetFieldAccessorTable() {
3682      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable
3683          .ensureFieldAccessorsInitialized(
3684              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.Builder.class);
3685    }
3686
3687    public static com.google.protobuf.Parser<GetServiceStatusResponseProto> PARSER =
3688        new com.google.protobuf.AbstractParser<GetServiceStatusResponseProto>() {
3689      public GetServiceStatusResponseProto parsePartialFrom(
3690          com.google.protobuf.CodedInputStream input,
3691          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3692          throws com.google.protobuf.InvalidProtocolBufferException {
3693        return new GetServiceStatusResponseProto(input, extensionRegistry);
3694      }
3695    };
3696
3697    @java.lang.Override
3698    public com.google.protobuf.Parser<GetServiceStatusResponseProto> getParserForType() {
3699      return PARSER;
3700    }
3701
3702    private int bitField0_;
3703    // required .hadoop.common.HAServiceStateProto state = 1;
3704    public static final int STATE_FIELD_NUMBER = 1;
3705    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_;
3706    /**
3707     * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
3708     */
3709    public boolean hasState() {
3710      return ((bitField0_ & 0x00000001) == 0x00000001);
3711    }
3712    /**
3713     * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
3714     */
3715    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
3716      return state_;
3717    }
3718
3719    // optional bool readyToBecomeActive = 2;
3720    public static final int READYTOBECOMEACTIVE_FIELD_NUMBER = 2;
3721    private boolean readyToBecomeActive_;
3722    /**
3723     * <code>optional bool readyToBecomeActive = 2;</code>
3724     *
3725     * <pre>
3726     * If state is STANDBY, indicate whether it is
3727     * ready to become active.
3728     * </pre>
3729     */
3730    public boolean hasReadyToBecomeActive() {
3731      return ((bitField0_ & 0x00000002) == 0x00000002);
3732    }
3733    /**
3734     * <code>optional bool readyToBecomeActive = 2;</code>
3735     *
3736     * <pre>
3737     * If state is STANDBY, indicate whether it is
3738     * ready to become active.
3739     * </pre>
3740     */
3741    public boolean getReadyToBecomeActive() {
3742      return readyToBecomeActive_;
3743    }
3744
3745    // optional string notReadyReason = 3;
3746    public static final int NOTREADYREASON_FIELD_NUMBER = 3;
3747    private java.lang.Object notReadyReason_;
3748    /**
3749     * <code>optional string notReadyReason = 3;</code>
3750     *
3751     * <pre>
3752     * If not ready to become active, a textual explanation of why not
3753     * </pre>
3754     */
3755    public boolean hasNotReadyReason() {
3756      return ((bitField0_ & 0x00000004) == 0x00000004);
3757    }
3758    /**
3759     * <code>optional string notReadyReason = 3;</code>
3760     *
3761     * <pre>
3762     * If not ready to become active, a textual explanation of why not
3763     * </pre>
3764     */
3765    public java.lang.String getNotReadyReason() {
3766      java.lang.Object ref = notReadyReason_;
3767      if (ref instanceof java.lang.String) {
3768        return (java.lang.String) ref;
3769      } else {
3770        com.google.protobuf.ByteString bs = 
3771            (com.google.protobuf.ByteString) ref;
3772        java.lang.String s = bs.toStringUtf8();
3773        if (bs.isValidUtf8()) {
3774          notReadyReason_ = s;
3775        }
3776        return s;
3777      }
3778    }
3779    /**
3780     * <code>optional string notReadyReason = 3;</code>
3781     *
3782     * <pre>
3783     * If not ready to become active, a textual explanation of why not
3784     * </pre>
3785     */
3786    public com.google.protobuf.ByteString
3787        getNotReadyReasonBytes() {
3788      java.lang.Object ref = notReadyReason_;
3789      if (ref instanceof java.lang.String) {
3790        com.google.protobuf.ByteString b = 
3791            com.google.protobuf.ByteString.copyFromUtf8(
3792                (java.lang.String) ref);
3793        notReadyReason_ = b;
3794        return b;
3795      } else {
3796        return (com.google.protobuf.ByteString) ref;
3797      }
3798    }
3799
3800    private void initFields() {
3801      state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
3802      readyToBecomeActive_ = false;
3803      notReadyReason_ = "";
3804    }
3805    private byte memoizedIsInitialized = -1;
3806    public final boolean isInitialized() {
3807      byte isInitialized = memoizedIsInitialized;
3808      if (isInitialized != -1) return isInitialized == 1;
3809
3810      if (!hasState()) {
3811        memoizedIsInitialized = 0;
3812        return false;
3813      }
3814      memoizedIsInitialized = 1;
3815      return true;
3816    }
3817
3818    public void writeTo(com.google.protobuf.CodedOutputStream output)
3819                        throws java.io.IOException {
3820      getSerializedSize();
3821      if (((bitField0_ & 0x00000001) == 0x00000001)) {
3822        output.writeEnum(1, state_.getNumber());
3823      }
3824      if (((bitField0_ & 0x00000002) == 0x00000002)) {
3825        output.writeBool(2, readyToBecomeActive_);
3826      }
3827      if (((bitField0_ & 0x00000004) == 0x00000004)) {
3828        output.writeBytes(3, getNotReadyReasonBytes());
3829      }
3830      getUnknownFields().writeTo(output);
3831    }
3832
3833    private int memoizedSerializedSize = -1;
3834    public int getSerializedSize() {
3835      int size = memoizedSerializedSize;
3836      if (size != -1) return size;
3837
3838      size = 0;
3839      if (((bitField0_ & 0x00000001) == 0x00000001)) {
3840        size += com.google.protobuf.CodedOutputStream
3841          .computeEnumSize(1, state_.getNumber());
3842      }
3843      if (((bitField0_ & 0x00000002) == 0x00000002)) {
3844        size += com.google.protobuf.CodedOutputStream
3845          .computeBoolSize(2, readyToBecomeActive_);
3846      }
3847      if (((bitField0_ & 0x00000004) == 0x00000004)) {
3848        size += com.google.protobuf.CodedOutputStream
3849          .computeBytesSize(3, getNotReadyReasonBytes());
3850      }
3851      size += getUnknownFields().getSerializedSize();
3852      memoizedSerializedSize = size;
3853      return size;
3854    }
3855
3856    private static final long serialVersionUID = 0L;
3857    @java.lang.Override
3858    protected java.lang.Object writeReplace()
3859        throws java.io.ObjectStreamException {
3860      return super.writeReplace();
3861    }
3862
3863    @java.lang.Override
3864    public boolean equals(final java.lang.Object obj) {
3865      if (obj == this) {
3866       return true;
3867      }
3868      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)) {
3869        return super.equals(obj);
3870      }
3871      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) obj;
3872
3873      boolean result = true;
3874      result = result && (hasState() == other.hasState());
3875      if (hasState()) {
3876        result = result &&
3877            (getState() == other.getState());
3878      }
3879      result = result && (hasReadyToBecomeActive() == other.hasReadyToBecomeActive());
3880      if (hasReadyToBecomeActive()) {
3881        result = result && (getReadyToBecomeActive()
3882            == other.getReadyToBecomeActive());
3883      }
3884      result = result && (hasNotReadyReason() == other.hasNotReadyReason());
3885      if (hasNotReadyReason()) {
3886        result = result && getNotReadyReason()
3887            .equals(other.getNotReadyReason());
3888      }
3889      result = result &&
3890          getUnknownFields().equals(other.getUnknownFields());
3891      return result;
3892    }
3893
3894    private int memoizedHashCode = 0;
3895    @java.lang.Override
3896    public int hashCode() {
3897      if (memoizedHashCode != 0) {
3898        return memoizedHashCode;
3899      }
3900      int hash = 41;
3901      hash = (19 * hash) + getDescriptorForType().hashCode();
3902      if (hasState()) {
3903        hash = (37 * hash) + STATE_FIELD_NUMBER;
3904        hash = (53 * hash) + hashEnum(getState());
3905      }
3906      if (hasReadyToBecomeActive()) {
3907        hash = (37 * hash) + READYTOBECOMEACTIVE_FIELD_NUMBER;
3908        hash = (53 * hash) + hashBoolean(getReadyToBecomeActive());
3909      }
3910      if (hasNotReadyReason()) {
3911        hash = (37 * hash) + NOTREADYREASON_FIELD_NUMBER;
3912        hash = (53 * hash) + getNotReadyReason().hashCode();
3913      }
3914      hash = (29 * hash) + getUnknownFields().hashCode();
3915      memoizedHashCode = hash;
3916      return hash;
3917    }
3918
3919    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3920        com.google.protobuf.ByteString data)
3921        throws com.google.protobuf.InvalidProtocolBufferException {
3922      return PARSER.parseFrom(data);
3923    }
3924    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3925        com.google.protobuf.ByteString data,
3926        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3927        throws com.google.protobuf.InvalidProtocolBufferException {
3928      return PARSER.parseFrom(data, extensionRegistry);
3929    }
3930    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(byte[] data)
3931        throws com.google.protobuf.InvalidProtocolBufferException {
3932      return PARSER.parseFrom(data);
3933    }
3934    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3935        byte[] data,
3936        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3937        throws com.google.protobuf.InvalidProtocolBufferException {
3938      return PARSER.parseFrom(data, extensionRegistry);
3939    }
3940    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(java.io.InputStream input)
3941        throws java.io.IOException {
3942      return PARSER.parseFrom(input);
3943    }
3944    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3945        java.io.InputStream input,
3946        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3947        throws java.io.IOException {
3948      return PARSER.parseFrom(input, extensionRegistry);
3949    }
3950    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(java.io.InputStream input)
3951        throws java.io.IOException {
3952      return PARSER.parseDelimitedFrom(input);
3953    }
3954    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(
3955        java.io.InputStream input,
3956        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3957        throws java.io.IOException {
3958      return PARSER.parseDelimitedFrom(input, extensionRegistry);
3959    }
3960    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3961        com.google.protobuf.CodedInputStream input)
3962        throws java.io.IOException {
3963      return PARSER.parseFrom(input);
3964    }
3965    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
3966        com.google.protobuf.CodedInputStream input,
3967        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3968        throws java.io.IOException {
3969      return PARSER.parseFrom(input, extensionRegistry);
3970    }
3971
3972    public static Builder newBuilder() { return Builder.create(); }
3973    public Builder newBuilderForType() { return newBuilder(); }
3974    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto prototype) {
3975      return newBuilder().mergeFrom(prototype);
3976    }
3977    public Builder toBuilder() { return newBuilder(this); }
3978
3979    @java.lang.Override
3980    protected Builder newBuilderForType(
3981        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3982      Builder builder = new Builder(parent);
3983      return builder;
3984    }
3985    /**
3986     * Protobuf type {@code hadoop.common.GetServiceStatusResponseProto}
3987     *
3988     * <pre>
3989     **
3990     * Returns the state of the service
3991     * </pre>
3992     */
3993    public static final class Builder extends
3994        com.google.protobuf.GeneratedMessage.Builder<Builder>
3995       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProtoOrBuilder {
3996      public static final com.google.protobuf.Descriptors.Descriptor
3997          getDescriptor() {
3998        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
3999      }
4000
4001      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4002          internalGetFieldAccessorTable() {
4003        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable
4004            .ensureFieldAccessorsInitialized(
4005                org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class, org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.Builder.class);
4006      }
4007
4008      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.newBuilder()
4009      private Builder() {
4010        maybeForceBuilderInitialization();
4011      }
4012
4013      private Builder(
4014          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4015        super(parent);
4016        maybeForceBuilderInitialization();
4017      }
4018      private void maybeForceBuilderInitialization() {
4019        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4020        }
4021      }
4022      private static Builder create() {
4023        return new Builder();
4024      }
4025
4026      public Builder clear() {
4027        super.clear();
4028        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
4029        bitField0_ = (bitField0_ & ~0x00000001);
4030        readyToBecomeActive_ = false;
4031        bitField0_ = (bitField0_ & ~0x00000002);
4032        notReadyReason_ = "";
4033        bitField0_ = (bitField0_ & ~0x00000004);
4034        return this;
4035      }
4036
4037      public Builder clone() {
4038        return create().mergeFrom(buildPartial());
4039      }
4040
4041      public com.google.protobuf.Descriptors.Descriptor
4042          getDescriptorForType() {
4043        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
4044      }
4045
4046      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getDefaultInstanceForType() {
4047        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
4048      }
4049
4050      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto build() {
4051        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
4052        if (!result.isInitialized()) {
4053          throw newUninitializedMessageException(result);
4054        }
4055        return result;
4056      }
4057
4058      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildPartial() {
4059        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto(this);
4060        int from_bitField0_ = bitField0_;
4061        int to_bitField0_ = 0;
4062        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4063          to_bitField0_ |= 0x00000001;
4064        }
4065        result.state_ = state_;
4066        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4067          to_bitField0_ |= 0x00000002;
4068        }
4069        result.readyToBecomeActive_ = readyToBecomeActive_;
4070        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4071          to_bitField0_ |= 0x00000004;
4072        }
4073        result.notReadyReason_ = notReadyReason_;
4074        result.bitField0_ = to_bitField0_;
4075        onBuilt();
4076        return result;
4077      }
4078
4079      public Builder mergeFrom(com.google.protobuf.Message other) {
4080        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) {
4081          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)other);
4082        } else {
4083          super.mergeFrom(other);
4084          return this;
4085        }
4086      }
4087
4088      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other) {
4089        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()) return this;
4090        if (other.hasState()) {
4091          setState(other.getState());
4092        }
4093        if (other.hasReadyToBecomeActive()) {
4094          setReadyToBecomeActive(other.getReadyToBecomeActive());
4095        }
4096        if (other.hasNotReadyReason()) {
4097          bitField0_ |= 0x00000004;
4098          notReadyReason_ = other.notReadyReason_;
4099          onChanged();
4100        }
4101        this.mergeUnknownFields(other.getUnknownFields());
4102        return this;
4103      }
4104
4105      public final boolean isInitialized() {
4106        if (!hasState()) {
4107          
4108          return false;
4109        }
4110        return true;
4111      }
4112
4113      public Builder mergeFrom(
4114          com.google.protobuf.CodedInputStream input,
4115          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4116          throws java.io.IOException {
4117        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parsedMessage = null;
4118        try {
4119          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4120        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4121          parsedMessage = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) e.getUnfinishedMessage();
4122          throw e;
4123        } finally {
4124          if (parsedMessage != null) {
4125            mergeFrom(parsedMessage);
4126          }
4127        }
4128        return this;
4129      }
4130      private int bitField0_;
4131
4132      // required .hadoop.common.HAServiceStateProto state = 1;
4133      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
4134      /**
4135       * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
4136       */
4137      public boolean hasState() {
4138        return ((bitField0_ & 0x00000001) == 0x00000001);
4139      }
4140      /**
4141       * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
4142       */
4143      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
4144        return state_;
4145      }
4146      /**
4147       * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
4148       */
4149      public Builder setState(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value) {
4150        if (value == null) {
4151          throw new NullPointerException();
4152        }
4153        bitField0_ |= 0x00000001;
4154        state_ = value;
4155        onChanged();
4156        return this;
4157      }
4158      /**
4159       * <code>required .hadoop.common.HAServiceStateProto state = 1;</code>
4160       */
4161      public Builder clearState() {
4162        bitField0_ = (bitField0_ & ~0x00000001);
4163        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
4164        onChanged();
4165        return this;
4166      }
4167
4168      // optional bool readyToBecomeActive = 2;
4169      private boolean readyToBecomeActive_ ;
4170      /**
4171       * <code>optional bool readyToBecomeActive = 2;</code>
4172       *
4173       * <pre>
4174       * If state is STANDBY, indicate whether it is
4175       * ready to become active.
4176       * </pre>
4177       */
4178      public boolean hasReadyToBecomeActive() {
4179        return ((bitField0_ & 0x00000002) == 0x00000002);
4180      }
4181      /**
4182       * <code>optional bool readyToBecomeActive = 2;</code>
4183       *
4184       * <pre>
4185       * If state is STANDBY, indicate whether it is
4186       * ready to become active.
4187       * </pre>
4188       */
4189      public boolean getReadyToBecomeActive() {
4190        return readyToBecomeActive_;
4191      }
4192      /**
4193       * <code>optional bool readyToBecomeActive = 2;</code>
4194       *
4195       * <pre>
4196       * If state is STANDBY, indicate whether it is
4197       * ready to become active.
4198       * </pre>
4199       */
4200      public Builder setReadyToBecomeActive(boolean value) {
4201        bitField0_ |= 0x00000002;
4202        readyToBecomeActive_ = value;
4203        onChanged();
4204        return this;
4205      }
4206      /**
4207       * <code>optional bool readyToBecomeActive = 2;</code>
4208       *
4209       * <pre>
4210       * If state is STANDBY, indicate whether it is
4211       * ready to become active.
4212       * </pre>
4213       */
4214      public Builder clearReadyToBecomeActive() {
4215        bitField0_ = (bitField0_ & ~0x00000002);
4216        readyToBecomeActive_ = false;
4217        onChanged();
4218        return this;
4219      }
4220
4221      // optional string notReadyReason = 3;
4222      private java.lang.Object notReadyReason_ = "";
4223      /**
4224       * <code>optional string notReadyReason = 3;</code>
4225       *
4226       * <pre>
4227       * If not ready to become active, a textual explanation of why not
4228       * </pre>
4229       */
4230      public boolean hasNotReadyReason() {
4231        return ((bitField0_ & 0x00000004) == 0x00000004);
4232      }
4233      /**
4234       * <code>optional string notReadyReason = 3;</code>
4235       *
4236       * <pre>
4237       * If not ready to become active, a textual explanation of why not
4238       * </pre>
4239       */
4240      public java.lang.String getNotReadyReason() {
4241        java.lang.Object ref = notReadyReason_;
4242        if (!(ref instanceof java.lang.String)) {
4243          java.lang.String s = ((com.google.protobuf.ByteString) ref)
4244              .toStringUtf8();
4245          notReadyReason_ = s;
4246          return s;
4247        } else {
4248          return (java.lang.String) ref;
4249        }
4250      }
4251      /**
4252       * <code>optional string notReadyReason = 3;</code>
4253       *
4254       * <pre>
4255       * If not ready to become active, a textual explanation of why not
4256       * </pre>
4257       */
4258      public com.google.protobuf.ByteString
4259          getNotReadyReasonBytes() {
4260        java.lang.Object ref = notReadyReason_;
4261        if (ref instanceof String) {
4262          com.google.protobuf.ByteString b = 
4263              com.google.protobuf.ByteString.copyFromUtf8(
4264                  (java.lang.String) ref);
4265          notReadyReason_ = b;
4266          return b;
4267        } else {
4268          return (com.google.protobuf.ByteString) ref;
4269        }
4270      }
4271      /**
4272       * <code>optional string notReadyReason = 3;</code>
4273       *
4274       * <pre>
4275       * If not ready to become active, a textual explanation of why not
4276       * </pre>
4277       */
4278      public Builder setNotReadyReason(
4279          java.lang.String value) {
4280        if (value == null) {
4281    throw new NullPointerException();
4282  }
4283  bitField0_ |= 0x00000004;
4284        notReadyReason_ = value;
4285        onChanged();
4286        return this;
4287      }
4288      /**
4289       * <code>optional string notReadyReason = 3;</code>
4290       *
4291       * <pre>
4292       * If not ready to become active, a textual explanation of why not
4293       * </pre>
4294       */
4295      public Builder clearNotReadyReason() {
4296        bitField0_ = (bitField0_ & ~0x00000004);
4297        notReadyReason_ = getDefaultInstance().getNotReadyReason();
4298        onChanged();
4299        return this;
4300      }
4301      /**
4302       * <code>optional string notReadyReason = 3;</code>
4303       *
4304       * <pre>
4305       * If not ready to become active, a textual explanation of why not
4306       * </pre>
4307       */
4308      public Builder setNotReadyReasonBytes(
4309          com.google.protobuf.ByteString value) {
4310        if (value == null) {
4311    throw new NullPointerException();
4312  }
4313  bitField0_ |= 0x00000004;
4314        notReadyReason_ = value;
4315        onChanged();
4316        return this;
4317      }
4318
4319      // @@protoc_insertion_point(builder_scope:hadoop.common.GetServiceStatusResponseProto)
4320    }
4321
4322    static {
4323      defaultInstance = new GetServiceStatusResponseProto(true);
4324      defaultInstance.initFields();
4325    }
4326
4327    // @@protoc_insertion_point(class_scope:hadoop.common.GetServiceStatusResponseProto)
4328  }
4329
4330  /**
4331   * Protobuf service {@code hadoop.common.HAServiceProtocolService}
4332   *
4333   * <pre>
4334   **
4335   * Protocol interface provides High availability related 
4336   * primitives to monitor and failover a service.
4337   *
4338   * For details see o.a.h.ha.HAServiceProtocol.
4339   * </pre>
4340   */
4341  public static abstract class HAServiceProtocolService
4342      implements com.google.protobuf.Service {
4343    protected HAServiceProtocolService() {}
4344
4345    public interface Interface {
4346      /**
4347       * <code>rpc monitorHealth(.hadoop.common.MonitorHealthRequestProto) returns (.hadoop.common.MonitorHealthResponseProto);</code>
4348       *
4349       * <pre>
4350       **
4351       * Monitor the health of a service.
4352       * </pre>
4353       */
4354      public abstract void monitorHealth(
4355          com.google.protobuf.RpcController controller,
4356          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
4357          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
4358
4359      /**
4360       * <code>rpc transitionToActive(.hadoop.common.TransitionToActiveRequestProto) returns (.hadoop.common.TransitionToActiveResponseProto);</code>
4361       *
4362       * <pre>
4363       **
4364       * Request service to tranisition to active state.
4365       * </pre>
4366       */
4367      public abstract void transitionToActive(
4368          com.google.protobuf.RpcController controller,
4369          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
4370          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
4371
4372      /**
4373       * <code>rpc transitionToStandby(.hadoop.common.TransitionToStandbyRequestProto) returns (.hadoop.common.TransitionToStandbyResponseProto);</code>
4374       *
4375       * <pre>
4376       **
4377       * Request service to transition to standby state.
4378       * </pre>
4379       */
4380      public abstract void transitionToStandby(
4381          com.google.protobuf.RpcController controller,
4382          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
4383          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
4384
4385      /**
4386       * <code>rpc getServiceStatus(.hadoop.common.GetServiceStatusRequestProto) returns (.hadoop.common.GetServiceStatusResponseProto);</code>
4387       *
4388       * <pre>
4389       **
4390       * Get the current status of the service.
4391       * </pre>
4392       */
4393      public abstract void getServiceStatus(
4394          com.google.protobuf.RpcController controller,
4395          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
4396          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
4397
4398    }
4399
    /**
     * Adapts an {@link Interface} implementation into a
     * {@link com.google.protobuf.Service} by delegating each of the four
     * RPC methods directly to {@code impl}.
     */
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new HAServiceProtocolService() {
        @java.lang.Override
        public  void monitorHealth(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
          impl.monitorHealth(controller, request, done);
        }

        @java.lang.Override
        public  void transitionToActive(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
          impl.transitionToActive(controller, request, done);
        }

        @java.lang.Override
        public  void transitionToStandby(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
          impl.transitionToStandby(controller, request, done);
        }

        @java.lang.Override
        public  void getServiceStatus(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
          impl.getServiceStatus(controller, request, done);
        }

      };
    }
4437
    /**
     * Wraps a {@link BlockingInterface} implementation as a
     * {@link com.google.protobuf.BlockingService}.  Dispatch is by method
     * descriptor index: 0=monitorHealth, 1=transitionToActive,
     * 2=transitionToStandby, 3=getServiceStatus — the indices correspond to
     * the declaration order of the rpcs in HAServiceProtocol.proto.
     */
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        // Dispatches a blocking call to impl; rejects descriptors that do
        // not belong to this service.
        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request);
            case 1:
              return impl.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request);
            case 2:
              return impl.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request);
            case 3:
              return impl.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Default (prototype) request message for each method index.
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Default (prototype) response message for each method index.
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
4516
4517    /**
4518     * <code>rpc monitorHealth(.hadoop.common.MonitorHealthRequestProto) returns (.hadoop.common.MonitorHealthResponseProto);</code>
4519     *
4520     * <pre>
4521     **
4522     * Monitor the health of a service.
4523     * </pre>
4524     */
4525    public abstract void monitorHealth(
4526        com.google.protobuf.RpcController controller,
4527        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
4528        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
4529
4530    /**
4531     * <code>rpc transitionToActive(.hadoop.common.TransitionToActiveRequestProto) returns (.hadoop.common.TransitionToActiveResponseProto);</code>
4532     *
4533     * <pre>
4534     **
4535     * Request service to tranisition to active state.
4536     * </pre>
4537     */
4538    public abstract void transitionToActive(
4539        com.google.protobuf.RpcController controller,
4540        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
4541        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
4542
4543    /**
4544     * <code>rpc transitionToStandby(.hadoop.common.TransitionToStandbyRequestProto) returns (.hadoop.common.TransitionToStandbyResponseProto);</code>
4545     *
4546     * <pre>
4547     **
4548     * Request service to transition to standby state.
4549     * </pre>
4550     */
4551    public abstract void transitionToStandby(
4552        com.google.protobuf.RpcController controller,
4553        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
4554        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
4555
4556    /**
4557     * <code>rpc getServiceStatus(.hadoop.common.GetServiceStatusRequestProto) returns (.hadoop.common.GetServiceStatusResponseProto);</code>
4558     *
4559     * <pre>
4560     **
4561     * Get the current status of the service.
4562     * </pre>
4563     */
4564    public abstract void getServiceStatus(
4565        com.google.protobuf.RpcController controller,
4566        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
4567        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
4568
    /**
     * Returns this service's descriptor — the first (and only) service
     * declared in HAServiceProtocol.proto.
     */
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getServices().get(0);
    }
    /** Instance-level accessor required by the Service API; delegates to {@link #getDescriptor()}. */
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
4578
    /**
     * Reflective dispatch for the async Service API: routes {@code method}
     * (by descriptor index 0-3, in .proto declaration order) to the matching
     * abstract method, narrowing the generic callback to the concrete
     * response type via {@code RpcUtil.specializeCallback}.  Rejects method
     * descriptors that belong to a different service.
     */
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto>specializeCallback(
              done));
          return;
        case 3:
          this.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
4615
    /**
     * Returns the default (prototype) request message for {@code method},
     * keyed by descriptor index 0-3 in .proto declaration order.
     */
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
4637
    /**
     * Returns the default (prototype) response message for {@code method},
     * keyed by descriptor index 0-3 in .proto declaration order.
     */
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
4659
    /** Creates a non-blocking client stub that issues calls over {@code channel}. */
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
4664
    /**
     * Non-blocking client stub: each method forwards the request over the
     * supplied {@link com.google.protobuf.RpcChannel}, selecting the RPC by
     * method-descriptor index (0-3, .proto declaration order) and widening
     * the typed callback via {@code RpcUtil.generalizeCallback}.
     */
    public static final class Stub extends org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceProtocolService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      // Transport all four RPCs are issued on; immutable after construction.
      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()));
      }

      public  void transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()));
      }

      public  void transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()));
      }

      public  void getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()));
      }
    }
4736
    /** Creates a blocking client stub that issues calls over {@code channel}. */
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
4741
    /**
     * Synchronous variant of the service: each method blocks until the
     * response is available and throws
     * {@link com.google.protobuf.ServiceException} on failure.
     */
    public interface BlockingInterface {
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
          throws com.google.protobuf.ServiceException;
    }
4763
    /**
     * Blocking client stub: each method forwards the request over the
     * supplied {@link com.google.protobuf.BlockingRpcChannel}, selecting the
     * RPC by method-descriptor index (0-3, .proto declaration order), and
     * casts the returned message to the concrete response type.
     */
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      // Transport all four RPCs are issued on; immutable after construction.
      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance());
      }

    }
4819
4820    // @@protoc_insertion_point(class_scope:hadoop.common.HAServiceProtocolService)
4821  }
4822
  // Per-message descriptors and reflective FieldAccessorTables, one pair for
  // each message type in HAServiceProtocol.proto.  All are populated exactly
  // once by the InternalDescriptorAssigner in this class's static initializer.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_MonitorHealthRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_MonitorHealthResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable;
4868
  /** Returns the {@code FileDescriptor} for HAServiceProtocol.proto. */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once, by the InternalDescriptorAssigner in the static
  // initializer, when the file descriptor is built.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for HAServiceProtocol.proto, emitted by
    // protoc as escaped string chunks.  These bytes must stay exactly in
    // sync with the compiled .proto — never edit them by hand.
    java.lang.String[] descriptorData = {
      "\n\027HAServiceProtocol.proto\022\rhadoop.common" +
      "\"R\n\035HAStateChangeRequestInfoProto\0221\n\treq" +
      "Source\030\001 \002(\0162\036.hadoop.common.HARequestSo" +
      "urce\"\033\n\031MonitorHealthRequestProto\"\034\n\032Mon" +
      "itorHealthResponseProto\"_\n\036TransitionToA" +
      "ctiveRequestProto\022=\n\007reqInfo\030\001 \002(\0132,.had" +
      "oop.common.HAStateChangeRequestInfoProto" +
      "\"!\n\037TransitionToActiveResponseProto\"`\n\037T" +
      "ransitionToStandbyRequestProto\022=\n\007reqInf" +
      "o\030\001 \002(\0132,.hadoop.common.HAStateChangeReq",
      "uestInfoProto\"\"\n TransitionToStandbyResp" +
      "onseProto\"\036\n\034GetServiceStatusRequestProt" +
      "o\"\207\001\n\035GetServiceStatusResponseProto\0221\n\005s" +
      "tate\030\001 \002(\0162\".hadoop.common.HAServiceStat" +
      "eProto\022\033\n\023readyToBecomeActive\030\002 \001(\010\022\026\n\016n" +
      "otReadyReason\030\003 \001(\t*@\n\023HAServiceStatePro" +
      "to\022\020\n\014INITIALIZING\020\000\022\n\n\006ACTIVE\020\001\022\013\n\007STAN" +
      "DBY\020\002*W\n\017HARequestSource\022\023\n\017REQUEST_BY_U" +
      "SER\020\000\022\032\n\026REQUEST_BY_USER_FORCED\020\001\022\023\n\017REQ" +
      "UEST_BY_ZKFC\020\0022\334\003\n\030HAServiceProtocolServ",
      "ice\022d\n\rmonitorHealth\022(.hadoop.common.Mon" +
      "itorHealthRequestProto\032).hadoop.common.M" +
      "onitorHealthResponseProto\022s\n\022transitionT" +
      "oActive\022-.hadoop.common.TransitionToActi" +
      "veRequestProto\032..hadoop.common.Transitio" +
      "nToActiveResponseProto\022v\n\023transitionToSt" +
      "andby\022..hadoop.common.TransitionToStandb" +
      "yRequestProto\032/.hadoop.common.Transition" +
      "ToStandbyResponseProto\022m\n\020getServiceStat" +
      "us\022+.hadoop.common.GetServiceStatusReque",
      "stProto\032,.hadoop.common.GetServiceStatus" +
      "ResponseProtoB;\n\032org.apache.hadoop.ha.pr" +
      "otoB\027HAServiceProtocolProtos\210\001\001\240\001\001"
    };
    // Callback invoked once the FileDescriptor is built: caches the file
    // descriptor, each message descriptor (by message-type index, in .proto
    // declaration order), and the reflective field-accessor tables.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_HAStateChangeRequestInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_HAStateChangeRequestInfoProto_descriptor,
              new java.lang.String[] { "ReqSource", });
          internal_static_hadoop_common_MonitorHealthRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_MonitorHealthRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_MonitorHealthRequestProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_MonitorHealthResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_MonitorHealthResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_MonitorHealthResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_TransitionToActiveRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_TransitionToActiveRequestProto_descriptor,
              new java.lang.String[] { "ReqInfo", });
          internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_common_TransitionToActiveResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_TransitionToActiveResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_common_TransitionToStandbyRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_TransitionToStandbyRequestProto_descriptor,
              new java.lang.String[] { "ReqInfo", });
          internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hadoop_common_TransitionToStandbyResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_TransitionToStandbyResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hadoop_common_GetServiceStatusRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GetServiceStatusRequestProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hadoop_common_GetServiceStatusResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GetServiceStatusResponseProto_descriptor,
              new java.lang.String[] { "State", "ReadyToBecomeActive", "NotReadyReason", });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
4978
4979  // @@protoc_insertion_point(outer_class_scope)
4980}