// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ZKFCProtocol.proto

package org.apache.hadoop.ha.proto;

public final class ZKFCProtocolProtos {
  private ZKFCProtocolProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
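  // Editorial note: reconstructed from the generated API below (not copied from
  // the original source file), the definitions in ZKFCProtocol.proto plausibly
  // read as follows. Message names and the millisToCede field come from the
  // generated Javadoc; everything else is an assumption.
  //
  //   package hadoop.common;
  //
  //   message CedeActiveRequestProto       { required uint32 millisToCede = 1; }
  //   message CedeActiveResponseProto      {}
  //   message GracefulFailoverRequestProto {}
  //   message GracefulFailoverResponseProto {}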
  public interface CedeActiveRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint32 millisToCede = 1;
    /**
     * <code>required uint32 millisToCede = 1;</code>
     */
    boolean hasMillisToCede();
    /**
     * <code>required uint32 millisToCede = 1;</code>
     */
    int getMillisToCede();
  }
  /**
   * Protobuf type {@code hadoop.common.CedeActiveRequestProto}
   */
  public static final class CedeActiveRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements CedeActiveRequestProtoOrBuilder {
    // Use CedeActiveRequestProto.newBuilder() to construct.
    private CedeActiveRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CedeActiveRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CedeActiveRequestProto defaultInstance;
    public static CedeActiveRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public CedeActiveRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CedeActiveRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              millisToCede_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<CedeActiveRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<CedeActiveRequestProto>() {
      public CedeActiveRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CedeActiveRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CedeActiveRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required uint32 millisToCede = 1;
    public static final int MILLISTOCEDE_FIELD_NUMBER = 1;
    private int millisToCede_;
    /**
     * <code>required uint32 millisToCede = 1;</code>
     */
    public boolean hasMillisToCede() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint32 millisToCede = 1;</code>
     */
    public int getMillisToCede() {
      return millisToCede_;
    }

    private void initFields() {
      millisToCede_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasMillisToCede()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
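    // Editorial note: because millisToCede is a required field, isInitialized()
    // stays false until the field is set, and Builder.build() (below) turns that
    // into an UninitializedMessageException. A minimal illustration, with a
    // hypothetical timeout value:
    //
    //   CedeActiveRequestProto.newBuilder().build();   // throws: millisToCede unset
    //   CedeActiveRequestProto.newBuilder()
    //       .setMillisToCede(10000)                    // 10s, illustrative only
    //       .build();                                  // ok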

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, millisToCede_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, millisToCede_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto) obj;

      boolean result = true;
      result = result && (hasMillisToCede() == other.hasMillisToCede());
      if (hasMillisToCede()) {
        result = result && (getMillisToCede()
            == other.getMillisToCede());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMillisToCede()) {
        hash = (37 * hash) + MILLISTOCEDE_FIELD_NUMBER;
        hash = (53 * hash) + getMillisToCede();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
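    // Editorial note: the overloads above are thin wrappers around PARSER.
    // parseDelimitedFrom expects a length-prefixed message, i.e. the framing
    // produced by writeDelimitedTo (inherited from the protobuf runtime); the
    // plain parseFrom overloads expect a bare message. A round-trip sketch with
    // illustrative stream names:
    //
    //   req.writeDelimitedTo(out);                           // length-prefixed write
    //   CedeActiveRequestProto back =
    //       CedeActiveRequestProto.parseDelimitedFrom(in);   // matching read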

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.CedeActiveRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        millisToCede_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.millisToCede_ = millisToCede_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance()) return this;
        if (other.hasMillisToCede()) {
          setMillisToCede(other.getMillisToCede());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasMillisToCede()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required uint32 millisToCede = 1;
      private int millisToCede_;
      /**
       * <code>required uint32 millisToCede = 1;</code>
       */
      public boolean hasMillisToCede() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint32 millisToCede = 1;</code>
       */
      public int getMillisToCede() {
        return millisToCede_;
      }
      /**
       * <code>required uint32 millisToCede = 1;</code>
       */
      public Builder setMillisToCede(int value) {
        bitField0_ |= 0x00000001;
        millisToCede_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint32 millisToCede = 1;</code>
       */
      public Builder clearMillisToCede() {
        bitField0_ = (bitField0_ & ~0x00000001);
        millisToCede_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.CedeActiveRequestProto)
    }

    static {
      defaultInstance = new CedeActiveRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.CedeActiveRequestProto)
  }
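  // Editorial usage sketch for the message above. Values and variable names are
  // illustrative, not part of the generated code:
  //
  //   byte[] bytes = CedeActiveRequestProto.newBuilder()
  //       .setMillisToCede(5000)                          // hypothetical value
  //       .build()
  //       .toByteArray();                                 // serialize to wire format
  //   CedeActiveRequestProto parsed = CedeActiveRequestProto.parseFrom(bytes);
  //   assert parsed.hasMillisToCede() && parsed.getMillisToCede() == 5000;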

  public interface CedeActiveResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.common.CedeActiveResponseProto}
   */
  public static final class CedeActiveResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements CedeActiveResponseProtoOrBuilder {
    // Use CedeActiveResponseProto.newBuilder() to construct.
    private CedeActiveResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CedeActiveResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CedeActiveResponseProto defaultInstance;
    public static CedeActiveResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public CedeActiveResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CedeActiveResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<CedeActiveResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<CedeActiveResponseProto>() {
      public CedeActiveResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CedeActiveResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CedeActiveResponseProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.CedeActiveResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.CedeActiveResponseProto)
    }

    static {
      defaultInstance = new CedeActiveResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.CedeActiveResponseProto)
  }
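  // Editorial note: CedeActiveResponseProto declares no fields, so a freshly
  // built instance encodes to zero bytes; only unknown fields carried over
  // during parsing would contribute to its wire size. An illustrative check:
  //
  //   CedeActiveResponseProto resp = CedeActiveResponseProto.newBuilder().build();
  //   assert resp.getSerializedSize() == 0;   // nothing to encode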

  public interface GracefulFailoverRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.common.GracefulFailoverRequestProto}
   */
  public static final class GracefulFailoverRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GracefulFailoverRequestProtoOrBuilder {
    // Use GracefulFailoverRequestProto.newBuilder() to construct.
    private GracefulFailoverRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GracefulFailoverRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GracefulFailoverRequestProto defaultInstance;
    public static GracefulFailoverRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GracefulFailoverRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GracefulFailoverRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GracefulFailoverRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GracefulFailoverRequestProto>() {
      public GracefulFailoverRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GracefulFailoverRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GracefulFailoverRequestProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.GracefulFailoverRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.GracefulFailoverRequestProto)
    }

    static {
      defaultInstance = new GracefulFailoverRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.GracefulFailoverRequestProto)
  }

  public interface GracefulFailoverResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.common.GracefulFailoverResponseProto}
   */
  public static final class GracefulFailoverResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GracefulFailoverResponseProtoOrBuilder {
    // Use GracefulFailoverResponseProto.newBuilder() to construct.
    private GracefulFailoverResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GracefulFailoverResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GracefulFailoverResponseProto defaultInstance;
    public static GracefulFailoverResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GracefulFailoverResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GracefulFailoverResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GracefulFailoverResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GracefulFailoverResponseProto>() {
      public GracefulFailoverResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GracefulFailoverResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GracefulFailoverResponseProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
1296        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(byte[] data)
1297            throws com.google.protobuf.InvalidProtocolBufferException {
1298          return PARSER.parseFrom(data);
1299        }
1300        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
1301            byte[] data,
1302            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1303            throws com.google.protobuf.InvalidProtocolBufferException {
1304          return PARSER.parseFrom(data, extensionRegistry);
1305        }
1306        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(java.io.InputStream input)
1307            throws java.io.IOException {
1308          return PARSER.parseFrom(input);
1309        }
1310        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
1311            java.io.InputStream input,
1312            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1313            throws java.io.IOException {
1314          return PARSER.parseFrom(input, extensionRegistry);
1315        }
1316        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseDelimitedFrom(java.io.InputStream input)
1317            throws java.io.IOException {
1318          return PARSER.parseDelimitedFrom(input);
1319        }
1320        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseDelimitedFrom(
1321            java.io.InputStream input,
1322            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1323            throws java.io.IOException {
1324          return PARSER.parseDelimitedFrom(input, extensionRegistry);
1325        }
1326        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
1327            com.google.protobuf.CodedInputStream input)
1328            throws java.io.IOException {
1329          return PARSER.parseFrom(input);
1330        }
1331        public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
1332            com.google.protobuf.CodedInputStream input,
1333            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1334            throws java.io.IOException {
1335          return PARSER.parseFrom(input, extensionRegistry);
1336        }
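
    // Illustrative only (not part of the protoc output): a minimal sketch of
    // deserializing this message, assuming `bytes` is a byte[] previously
    // produced by toByteArray() or writeTo():
    //
    //   GracefulFailoverResponseProto resp =
    //       GracefulFailoverResponseProto.parseFrom(bytes);
    //
    // The message declares no fields, so a successful parse simply confirms
    // that the payload is a well-formed (possibly empty) protobuf message.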

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
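
    // Illustrative only (not part of the protoc output): constructing an
    // instance via the Builder API. Since the message has no fields, the
    // result is equivalent to getDefaultInstance():
    //
    //   GracefulFailoverResponseProto resp =
    //       GracefulFailoverResponseProto.newBuilder().build();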

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.GracefulFailoverResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.class, org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.GracefulFailoverResponseProto)
    }

    static {
      defaultInstance = new GracefulFailoverResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.GracefulFailoverResponseProto)
  }

  /**
   * Protobuf service {@code hadoop.common.ZKFCProtocolService}
   *
   * <pre>
   * Protocol provides manual control of the ZK Failover Controllers
   * </pre>
   */
  public static abstract class ZKFCProtocolService
      implements com.google.protobuf.Service {
    protected ZKFCProtocolService() {}

    public interface Interface {
      /**
       * <code>rpc cedeActive(.hadoop.common.CedeActiveRequestProto) returns (.hadoop.common.CedeActiveResponseProto);</code>
       *
       * <pre>
       * Request that the service cede its active state, and quit the election
       * for some amount of time
       * </pre>
       */
      public abstract void cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done);

      /**
       * <code>rpc gracefulFailover(.hadoop.common.GracefulFailoverRequestProto) returns (.hadoop.common.GracefulFailoverResponseProto);</code>
       */
      public abstract void gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new ZKFCProtocolService() {
        @java.lang.Override
        public void cedeActive(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done) {
          impl.cedeActive(controller, request, done);
        }

        @java.lang.Override
        public void gracefulFailover(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done) {
          impl.gracefulFailover(controller, request, done);
        }

      };
    }
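
    // Illustrative only (not part of the protoc output): exposing a
    // server-side implementation through newReflectiveService(). The method
    // bodies below are placeholders, not Hadoop's actual ZKFC logic:
    //
    //   com.google.protobuf.Service service =
    //       ZKFCProtocolService.newReflectiveService(
    //           new ZKFCProtocolService.Interface() {
    //             public void cedeActive(
    //                 com.google.protobuf.RpcController controller,
    //                 CedeActiveRequestProto request,
    //                 com.google.protobuf.RpcCallback<CedeActiveResponseProto> done) {
    //               // ... quit the election for request.getMillisToCede() ms ...
    //               done.run(CedeActiveResponseProto.getDefaultInstance());
    //             }
    //             public void gracefulFailover(
    //                 com.google.protobuf.RpcController controller,
    //                 GracefulFailoverRequestProto request,
    //                 com.google.protobuf.RpcCallback<GracefulFailoverResponseProto> done) {
    //               // ... coordinate handing active state to this node ...
    //               done.run(GracefulFailoverResponseProto.getDefaultInstance());
    //             }
    //           });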

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.cedeActive(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)request);
            case 1:
              return impl.gracefulFailover(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
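
    // Illustrative only (not part of the protoc output): the blocking
    // counterpart dispatches by method index, so a BlockingInterface
    // implementation can be exposed as a generic BlockingService. A minimal
    // sketch, assuming `myBlockingImpl` is some BlockingInterface
    // implementation supplied by the caller:
    //
    //   com.google.protobuf.BlockingService blocking =
    //       ZKFCProtocolService.newReflectiveBlockingService(myBlockingImpl);
    //   // blocking.callBlockingMethod(...) routes method index 0 to
    //   // cedeActive and index 1 to gracefulFailover.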

    /**
     * <code>rpc cedeActive(.hadoop.common.CedeActiveRequestProto) returns (.hadoop.common.CedeActiveResponseProto);</code>
     *
     * <pre>
     * Request that the service cede its active state, and quit the election
     * for some amount of time
     * </pre>
     */
    public abstract void cedeActive(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done);

    /**
     * <code>rpc gracefulFailover(.hadoop.common.GracefulFailoverRequestProto) returns (.hadoop.common.GracefulFailoverResponseProto);</code>
     */
    public abstract void gracefulFailover(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.cedeActive(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.gracefulFailover(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ha.proto.ZKFCProtocolProtos.ZKFCProtocolService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public void cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.class,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance()));
      }

      public void gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.class,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance()));
      }
    }
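
    // Illustrative only (not part of the protoc output): an asynchronous call
    // through the non-blocking stub. `channel` and `controller` stand in for
    // whatever RpcChannel/RpcController implementation the caller supplies:
    //
    //   ZKFCProtocolService.Stub stub = ZKFCProtocolService.newStub(channel);
    //   stub.cedeActive(controller,
    //       CedeActiveRequestProto.newBuilder().setMillisToCede(10000).build(),
    //       new com.google.protobuf.RpcCallback<CedeActiveResponseProto>() {
    //         public void run(CedeActiveResponseProto response) {
    //           // invoked once the ZKFC has ceded its active state
    //         }
    //       });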

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance());
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance());
      }

    }
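
    // Illustrative only (not part of the protoc output): a synchronous call
    // through the blocking stub. `blockingChannel` and `controller` are
    // assumed to be supplied by the surrounding RPC framework:
    //
    //   ZKFCProtocolService.BlockingInterface proxy =
    //       ZKFCProtocolService.newBlockingStub(blockingChannel);
    //   GracefulFailoverResponseProto resp = proxy.gracefulFailover(
    //       controller, GracefulFailoverRequestProto.getDefaultInstance());
    //
    // The call blocks until the remote ZKFC replies or the channel throws a
    // ServiceException.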

    // @@protoc_insertion_point(class_scope:hadoop.common.ZKFCProtocolService)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\022ZKFCProtocol.proto\022\rhadoop.common\".\n\026C" +
      "edeActiveRequestProto\022\024\n\014millisToCede\030\001 " +
      "\002(\r\"\031\n\027CedeActiveResponseProto\"\036\n\034Gracef" +
      "ulFailoverRequestProto\"\037\n\035GracefulFailov" +
      "erResponseProto2\341\001\n\023ZKFCProtocolService\022" +
      "[\n\ncedeActive\022%.hadoop.common.CedeActive" +
      "RequestProto\032&.hadoop.common.CedeActiveR" +
      "esponseProto\022m\n\020gracefulFailover\022+.hadoo" +
      "p.common.GracefulFailoverRequestProto\032,." +
      "hadoop.common.GracefulFailoverResponsePr",
      "otoB6\n\032org.apache.hadoop.ha.protoB\022ZKFCP" +
      "rotocolProtos\210\001\001\240\001\001"
    };
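    // descriptorData above is the serialized FileDescriptorProto for
    // ZKFCProtocol.proto, escaped into Java string literals: it encodes the
    // four message types (CedeActiveRequestProto with its single required
    // uint32 millisToCede field, plus the three empty request/response
    // messages) and the two-method ZKFCProtocolService definition.
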
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_CedeActiveRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_CedeActiveRequestProto_descriptor,
              new java.lang.String[] { "MillisToCede", });
          internal_static_hadoop_common_CedeActiveResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_CedeActiveResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor,
              new java.lang.String[] { });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}