// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ZKFCProtocol.proto
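//
// ZKFCProtocol is the RPC interface exposed by Hadoop's ZooKeeper Failover
// Controller (ZKFC). The service defined at the bottom of this file carries
// two calls: cedeActive(), which asks the ZKFC to temporarily drop out of the
// ZooKeeper leader election, and gracefulFailover(), which asks it to
// coordinate a manual, graceful failover.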

package org.apache.hadoop.ha.proto;

public final class ZKFCProtocolProtos {
  private ZKFCProtocolProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface CedeActiveRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint32 millisToCede = 1;
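    // millisToCede is the period, in milliseconds, for which the ZKFC is
    // asked to cede its active state (i.e. leave the ZooKeeper election)
    // before automatically rejoining.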
    boolean hasMillisToCede();
    int getMillisToCede();
  }
  public static final class CedeActiveRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements CedeActiveRequestProtoOrBuilder {
    // Use CedeActiveRequestProto.newBuilder() to construct.
    private CedeActiveRequestProto(Builder builder) {
      super(builder);
    }
    private CedeActiveRequestProto(boolean noInit) {}

    private static final CedeActiveRequestProto defaultInstance;
    public static CedeActiveRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public CedeActiveRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable;
    }

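    // bitField0_ is the presence bitmap generated by protoc for optional and
    // required fields; bit 0 records whether millisToCede has been set.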
    private int bitField0_;
    // required uint32 millisToCede = 1;
    public static final int MILLISTOCEDE_FIELD_NUMBER = 1;
    private int millisToCede_;
    public boolean hasMillisToCede() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getMillisToCede() {
      return millisToCede_;
    }

    private void initFields() {
      millisToCede_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasMillisToCede()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, millisToCede_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, millisToCede_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto) obj;

      boolean result = true;
      result = result && (hasMillisToCede() == other.hasMillisToCede());
      if (hasMillisToCede()) {
        result = result && (getMillisToCede()
            == other.getMillisToCede());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMillisToCede()) {
        hash = (37 * hash) + MILLISTOCEDE_FIELD_NUMBER;
        hash = (53 * hash) + getMillisToCede();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        millisToCede_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDescriptor();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.millisToCede_ = millisToCede_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance()) return this;
        if (other.hasMillisToCede()) {
          setMillisToCede(other.getMillisToCede());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasMillisToCede()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
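            // tag 8 == (field number 1 << 3) | wire type 0 (varint),
            // i.e. the encoded millisToCede field.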
            case 8: {
              bitField0_ |= 0x00000001;
              millisToCede_ = input.readUInt32();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required uint32 millisToCede = 1;
      private int millisToCede_;
      public boolean hasMillisToCede() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getMillisToCede() {
        return millisToCede_;
      }
      public Builder setMillisToCede(int value) {
        bitField0_ |= 0x00000001;
        millisToCede_ = value;
        onChanged();
        return this;
      }
      public Builder clearMillisToCede() {
        bitField0_ = (bitField0_ & ~0x00000001);
        millisToCede_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.CedeActiveRequestProto)
    }

    static {
      defaultInstance = new CedeActiveRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.CedeActiveRequestProto)
  }
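
  // A minimal usage sketch (illustrative, not part of the generated output):
  // building, serializing, and re-parsing a CedeActiveRequestProto with the
  // API above.
  //
  //   CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
  //       .setMillisToCede(10000)          // cede active state for 10s
  //       .build();                        // throws if millisToCede is unset
  //   byte[] bytes = req.toByteArray();    // inherited from AbstractMessageLite
  //   CedeActiveRequestProto parsed = CedeActiveRequestProto.parseFrom(bytes);
  //   assert parsed.getMillisToCede() == 10000;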
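  // The three remaining message types below declare no fields: they exist so
  // that each RPC has a concrete request/response type, and fields can be
  // added to them later without breaking wire compatibility.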
  public interface CedeActiveResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class CedeActiveResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements CedeActiveResponseProtoOrBuilder {
    // Use CedeActiveResponseProto.newBuilder() to construct.
    private CedeActiveResponseProto(Builder builder) {
      super(builder);
    }
    private CedeActiveResponseProto(boolean noInit) {}

    private static final CedeActiveResponseProto defaultInstance;
    public static CedeActiveResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public CedeActiveResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDescriptor();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }


      // @@protoc_insertion_point(builder_scope:hadoop.common.CedeActiveResponseProto)
    }

    static {
      defaultInstance = new CedeActiveResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.CedeActiveResponseProto)
  }

  public interface GracefulFailoverRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class GracefulFailoverRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GracefulFailoverRequestProtoOrBuilder {
    // Use GracefulFailoverRequestProto.newBuilder() to construct.
    private GracefulFailoverRequestProto(Builder builder) {
      super(builder);
    }
    private GracefulFailoverRequestProto(boolean noInit) {}

    private static final GracefulFailoverRequestProto defaultInstance;
    public static GracefulFailoverRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GracefulFailoverRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDescriptor();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }


      // @@protoc_insertion_point(builder_scope:hadoop.common.GracefulFailoverRequestProto)
    }

    static {
      defaultInstance = new GracefulFailoverRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.GracefulFailoverRequestProto)
  }

  public interface GracefulFailoverResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class GracefulFailoverResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GracefulFailoverResponseProtoOrBuilder {
    // Use GracefulFailoverResponseProto.newBuilder() to construct.
    private GracefulFailoverResponseProto(Builder builder) {
      super(builder);
    }
    private GracefulFailoverResponseProto(boolean noInit) {}

    private static final GracefulFailoverResponseProto defaultInstance;
    public static GracefulFailoverResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GracefulFailoverResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto other = (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDescriptor();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto build() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto result = new org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }


      // @@protoc_insertion_point(builder_scope:hadoop.common.GracefulFailoverResponseProto)
    }

    static {
      defaultInstance = new GracefulFailoverResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.GracefulFailoverResponseProto)
  }

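  // ZKFCProtocolService binds the two RPCs above into protobuf's generic
  // Service/BlockingService plumbing. In Hadoop, the server side is normally
  // exposed through the BlockingService returned by
  // newReflectiveBlockingService().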
  public static abstract class ZKFCProtocolService
      implements com.google.protobuf.Service {
    protected ZKFCProtocolService() {}

    public interface Interface {
      public abstract void cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done);

      public abstract void gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new ZKFCProtocolService() {
        @java.lang.Override
        public void cedeActive(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done) {
          impl.cedeActive(controller, request, done);
        }

        @java.lang.Override
        public void gracefulFailover(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done) {
          impl.gracefulFailover(controller, request, done);
        }

      };
    }
1328    
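    /**
     * Wraps a {@link BlockingInterface} in a
     * {@link com.google.protobuf.BlockingService}: incoming calls are
     * dispatched synchronously by method index, and the request/response
     * prototypes let the RPC layer parse and validate messages of the
     * correct type for each method.
     */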
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.cedeActive(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)request);
            case 1:
              return impl.gracefulFailover(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

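    // The two service methods that concrete (non-reflective) subclasses
    // must implement; their signatures mirror those in Interface above.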
    public abstract void cedeActive(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done);

    public abstract void gracefulFailover(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

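    // Generic dispatch entry point used by RPC frameworks: downcasts the
    // request to the concrete type for the given method index and
    // specializes the untyped callback before delegating to the abstract
    // method above.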
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.cedeActive(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.gracefulFailover(controller, (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

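    // getRequestPrototype/getResponsePrototype hand the RPC layer default
    // instances of the right message type, from which incoming bytes for a
    // given method can be parsed (e.g. via prototype.newBuilderForType()).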
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

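    /**
     * Factory for the asynchronous client stub defined below.  A
     * hypothetical client-side sketch (the channel, controller, and done
     * callback are assumptions supplied by whatever RPC engine is in use):
     * <pre>
     *   ZKFCProtocolService.Interface client =
     *       ZKFCProtocolService.newStub(channel);
     *   client.gracefulFailover(controller,
     *       GracefulFailoverRequestProto.getDefaultInstance(),
     *       done);   // done receives the GracefulFailoverResponseProto
     * </pre>
     */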
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ha.proto.ZKFCProtocolProtos.ZKFCProtocolService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public void cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.class,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance()));
      }

      public void gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.class,
            org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance()));
      }
    }

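    /**
     * Factory for the synchronous client stub.  A minimal client-side
     * sketch, assuming "channel" is a BlockingRpcChannel obtained from the
     * surrounding RPC engine (not from this class) and that passing a null
     * controller is acceptable to that engine:
     * <pre>
     *   ZKFCProtocolService.BlockingInterface client =
     *       ZKFCProtocolService.newBlockingStub(channel);
     *   CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
     *       .setMillisToCede(10000)   // cede active state for 10 seconds
     *       .build();
     *   client.cedeActive(null, req);
     * </pre>
     */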
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

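    // Synchronous counterpart of Interface: each call blocks until the
    // response arrives and surfaces transport failures as
    // ServiceException.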
    public interface BlockingInterface {
      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

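    // Routes each blocking call through the channel with the matching
    // method descriptor and response prototype, then casts the result to
    // the concrete response type.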
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto cedeActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.getDefaultInstance());
      }

      public org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto gracefulFailover(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.getDefaultInstance());
      }

    }
  }

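  // Per-message descriptors and reflection accessor tables.  These are
  // populated once by the static initializer below, after the file
  // descriptor has been parsed.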
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_CedeActiveRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_CedeActiveResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
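  // descriptorData below is the serialized FileDescriptorProto for
  // ZKFCProtocol.proto.  Decoded by hand, the original .proto source was
  // roughly the following (a reconstruction, not the verbatim file;
  // comments and exact option ordering are lost in serialization):
  //
  //   package hadoop.common;
  //
  //   option java_package = "org.apache.hadoop.ha.proto";
  //   option java_outer_classname = "ZKFCProtocolProtos";
  //   option java_generic_services = true;
  //   option java_generate_equals_and_hash = true;
  //
  //   message CedeActiveRequestProto {
  //     required uint32 millisToCede = 1;
  //   }
  //   message CedeActiveResponseProto {}
  //   message GracefulFailoverRequestProto {}
  //   message GracefulFailoverResponseProto {}
  //
  //   service ZKFCProtocolService {
  //     rpc cedeActive(CedeActiveRequestProto)
  //         returns (CedeActiveResponseProto);
  //     rpc gracefulFailover(GracefulFailoverRequestProto)
  //         returns (GracefulFailoverResponseProto);
  //   }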
  static {
    java.lang.String[] descriptorData = {
      "\n\022ZKFCProtocol.proto\022\rhadoop.common\".\n\026C" +
      "edeActiveRequestProto\022\024\n\014millisToCede\030\001 " +
      "\002(\r\"\031\n\027CedeActiveResponseProto\"\036\n\034Gracef" +
      "ulFailoverRequestProto\"\037\n\035GracefulFailov" +
      "erResponseProto2\341\001\n\023ZKFCProtocolService\022" +
      "[\n\ncedeActive\022%.hadoop.common.CedeActive" +
      "RequestProto\032&.hadoop.common.CedeActiveR" +
      "esponseProto\022m\n\020gracefulFailover\022+.hadoo" +
      "p.common.GracefulFailoverRequestProto\032,." +
      "hadoop.common.GracefulFailoverResponsePr",
      "otoB6\n\032org.apache.hadoop.ha.protoB\022ZKFCP" +
      "rotocolProtos\210\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_CedeActiveRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_CedeActiveRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_CedeActiveRequestProto_descriptor,
              new java.lang.String[] { "MillisToCede", },
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.class,
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto.Builder.class);
          internal_static_hadoop_common_CedeActiveResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_CedeActiveResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_CedeActiveResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.class,
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveResponseProto.Builder.class);
          internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_GracefulFailoverRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GracefulFailoverRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.class,
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto.Builder.class);
          internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_GracefulFailoverResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GracefulFailoverResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.class,
              org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverResponseProto.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}