001    // Generated by the protocol buffer compiler.  DO NOT EDIT!
002    // source: HAServiceProtocol.proto
003    
004    package org.apache.hadoop.ha.proto;
005    
006    public final class HAServiceProtocolProtos {
  // Not instantiable: this outer class is only a namespace for the generated
  // message and enum types.
  private HAServiceProtocolProtos() {}
  /**
   * Registers any proto extensions with the given registry. The body is empty,
   * so HAServiceProtocol.proto evidently declares no extensions; this remains
   * as the standard generated hook callers can invoke unconditionally.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protocol buffer enum {@code HAServiceStateProto}.
   *
   * Represents the HA service state on the wire: INITIALIZING = 0,
   * ACTIVE = 1, STANDBY = 2. Generated by protoc; do not hand-edit.
   */
  public enum HAServiceStateProto
      implements com.google.protobuf.ProtocolMessageEnum {
    INITIALIZING(0, 0),
    ACTIVE(1, 1),
    STANDBY(2, 2),
    ;

    // Wire numbers for each constant, usable in switch statements.
    public static final int INITIALIZING_VALUE = 0;
    public static final int ACTIVE_VALUE = 1;
    public static final int STANDBY_VALUE = 2;


    /** Returns the proto-declared numeric value of this constant. */
    public final int getNumber() { return value; }

    /**
     * Maps a wire number back to its enum constant.
     * Returns {@code null} for unrecognized numbers rather than throwing.
     */
    public static HAServiceStateProto valueOf(int value) {
      switch (value) {
        case 0: return INITIALIZING;
        case 1: return ACTIVE;
        case 2: return STANDBY;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    // Adapter used by the protobuf runtime to resolve numbers to constants;
    // delegates to valueOf(int) above.
    private static com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<HAServiceStateProto>() {
            public HAServiceStateProto findValueByNumber(int number) {
              return HAServiceStateProto.valueOf(number);
            }
          };

    // Descriptor lookups use 'index', the position within the descriptor's
    // value list — distinct in principle from the wire number 'value'.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      // This enum is the first (index 0) enum type declared in the .proto file.
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getEnumTypes().get(0);
    }

    // Order must match descriptor index order; valueOf(EnumValueDescriptor)
    // indexes into this array by descriptor position.
    private static final HAServiceStateProto[] VALUES = {
      INITIALIZING, ACTIVE, STANDBY, 
    };

    /**
     * Resolves a descriptor back to its enum constant.
     * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
     *         different enum type.
     */
    public static HAServiceStateProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;  // position in the descriptor's value list
    private final int value;  // proto-declared wire number

    private HAServiceStateProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:HAServiceStateProto)
  }
082      
  /**
   * Read-only view shared by {@code MonitorHealthRequestProto} and its Builder.
   * The message declares no fields, so this adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface MonitorHealthRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protocol buffer type {@code MonitorHealthRequestProto}.
   *
   * An empty request message: it carries no declared fields, so
   * serialization consists solely of any unknown fields preserved from
   * parsing. Generated by protoc; do not hand-edit.
   */
  public static final class MonitorHealthRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements MonitorHealthRequestProtoOrBuilder {
    // Use MonitorHealthRequestProto.newBuilder() to construct.
    private MonitorHealthRequestProto(Builder builder) {
      super(builder);
    }
    // Used only by the static initializer below to create defaultInstance
    // without going through a Builder.
    private MonitorHealthRequestProto(boolean noInit) {}

    private static final MonitorHealthRequestProto defaultInstance;
    public static MonitorHealthRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public MonitorHealthRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_fieldAccessorTable;
    }

    // No fields to initialize.
    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      // Only unknown fields are written; the message declares none of its own.
      getUnknownFields().writeTo(output);
    }

    // Memoized size: -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) obj;

      // With no declared fields, equality reduces to unknown-field equality.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      // Consistent with equals(): descriptor identity plus unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    // ---- Static parse helpers: each delegates to a fresh Builder and
    // ---- finishes with buildParsed() so failures surface as
    // ---- InvalidProtocolBufferException rather than UninitializedMessageException.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null on clean EOF (mergeDelimitedFrom == false).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code MonitorHealthRequestProto}. With no declared fields,
     * it only accumulates unknown fields during merges/parses.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthRequestProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No sub-message field builders exist to force-initialize; the guarded
      // block is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDescriptor();
      }

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
      }

      /** Builds the message; throws if uninitialized (never here — no required fields). */
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but reports failure as InvalidProtocolBufferException
      // so parse* entry points throw the parser-appropriate exception type.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream: every tag is unknown to this message, so each is
       * routed to parseUnknownField until end-of-message (tag 0) or an
       * end-group tag (parseUnknownField returns false).
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }


      // @@protoc_insertion_point(builder_scope:MonitorHealthRequestProto)
    }

    static {
      // noInit constructor avoids touching descriptors during class init.
      defaultInstance = new MonitorHealthRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:MonitorHealthRequestProto)
  }
382      
  /**
   * Read-only view shared by {@code MonitorHealthResponseProto} and its Builder.
   * The message declares no fields, so this adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface MonitorHealthResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protocol buffer type {@code MonitorHealthResponseProto}.
   *
   * An empty response message: it carries no declared fields, so
   * serialization consists solely of any unknown fields preserved from
   * parsing. Generated by protoc; do not hand-edit.
   */
  public static final class MonitorHealthResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements MonitorHealthResponseProtoOrBuilder {
    // Use MonitorHealthResponseProto.newBuilder() to construct.
    private MonitorHealthResponseProto(Builder builder) {
      super(builder);
    }
    // Used only by the static initializer below to create defaultInstance
    // without going through a Builder.
    private MonitorHealthResponseProto(boolean noInit) {}

    private static final MonitorHealthResponseProto defaultInstance;
    public static MonitorHealthResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public MonitorHealthResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_fieldAccessorTable;
    }

    // No fields to initialize.
    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      // Only unknown fields are written; the message declares none of its own.
      getUnknownFields().writeTo(output);
    }

    // Memoized size: -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) obj;

      // With no declared fields, equality reduces to unknown-field equality.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      // Consistent with equals(): descriptor identity plus unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    // ---- Static parse helpers: each delegates to a fresh Builder and
    // ---- finishes with buildParsed() so failures surface as
    // ---- InvalidProtocolBufferException rather than UninitializedMessageException.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null on clean EOF (mergeDelimitedFrom == false).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code MonitorHealthResponseProto}. With no declared fields,
     * it only accumulates unknown fields during merges/parses.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_MonitorHealthResponseProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No sub-message field builders exist to force-initialize; the guarded
      // block is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDescriptor();
      }

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
      }

      /** Builds the message; throws if uninitialized (never here — no required fields). */
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but reports failure as InvalidProtocolBufferException
      // so parse* entry points throw the parser-appropriate exception type.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream: every tag is unknown to this message, so each is
       * routed to parseUnknownField until end-of-message (tag 0) or an
       * end-group tag (parseUnknownField returns false).
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }


      // @@protoc_insertion_point(builder_scope:MonitorHealthResponseProto)
    }

    static {
      // noInit constructor avoids touching descriptors during class init.
      defaultInstance = new MonitorHealthResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:MonitorHealthResponseProto)
  }
682      
  /**
   * Read-only view shared by {@code TransitionToActiveRequestProto} and its
   * Builder. The message declares no fields, so this adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface TransitionToActiveRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protocol buffer message {@code TransitionToActiveRequestProto}.
   *
   * <p>Declares no fields of its own: an instance carries only its
   * unknown-field set, serialization writes nothing else, and
   * {@link #isInitialized()} is always {@code true} (no required fields).
   * Generated by the protocol buffer compiler from
   * {@code HAServiceProtocol.proto}; do not edit by hand.
   */
  public static final class TransitionToActiveRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToActiveRequestProtoOrBuilder {
    // Use TransitionToActiveRequestProto.newBuilder() to construct.
    private TransitionToActiveRequestProto(Builder builder) {
      super(builder);
    }
    // No-op constructor used only to create the shared default instance
    // in the static initializer at the bottom of this class.
    private TransitionToActiveRequestProto(boolean noInit) {}
    
    // Singleton default instance; assigned once in the static initializer.
    private static final TransitionToActiveRequestProto defaultInstance;
    public static TransitionToActiveRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToActiveRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_fieldAccessorTable;
    }
    
    // Nothing to initialize: the message has no fields.
    private void initFields() {
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces the size to be memoized before writing, per generated-code
      // convention; only unknown fields are ever serialized.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    // Memoized serialized size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) obj;
      
      // With no declared fields, equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      // Hash mixes the descriptor identity with the unknown fields,
      // consistent with equals() above.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    // ---- Static parse helpers (pre-2.5 generated style): each builds a
    // ---- fresh Builder, merges the input, and validates via buildParsed().
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null when the stream is already at EOF
    // (mergeDelimitedFrom reports false).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code TransitionToActiveRequestProto}. Since the message
     * has no fields, the builder only manages the unknown-field set.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No sub-builders to force-initialize: the message has no fields.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but reports missing required fields as
      // InvalidProtocolBufferException for use from the parse helpers.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message type.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        // No required fields, so a builder is always initialized.
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        // Every tag is unknown to this field-less message; accumulate them
        // until end of stream (tag 0) or an end-group tag (parseUnknownField
        // returns false).
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:TransitionToActiveRequestProto)
    }
    
    // Create the shared default instance eagerly at class-load time.
    static {
      defaultInstance = new TransitionToActiveRequestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToActiveRequestProto)
  }
982      
  /**
   * Common read-only view shared by {@code TransitionToActiveResponseProto}
   * and its {@code Builder}. The message declares no fields, so this
   * interface adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface TransitionToActiveResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protocol buffer message {@code TransitionToActiveResponseProto}.
   *
   * <p>Declares no fields of its own: an instance carries only its
   * unknown-field set, serialization writes nothing else, and
   * {@link #isInitialized()} is always {@code true} (no required fields).
   * Generated by the protocol buffer compiler from
   * {@code HAServiceProtocol.proto}; do not edit by hand.
   */
  public static final class TransitionToActiveResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToActiveResponseProtoOrBuilder {
    // Use TransitionToActiveResponseProto.newBuilder() to construct.
    private TransitionToActiveResponseProto(Builder builder) {
      super(builder);
    }
    // No-op constructor used only to create the shared default instance
    // in the static initializer at the bottom of this class.
    private TransitionToActiveResponseProto(boolean noInit) {}
    
    // Singleton default instance; assigned once in the static initializer.
    private static final TransitionToActiveResponseProto defaultInstance;
    public static TransitionToActiveResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToActiveResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_fieldAccessorTable;
    }
    
    // Nothing to initialize: the message has no fields.
    private void initFields() {
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces the size to be memoized before writing, per generated-code
      // convention; only unknown fields are ever serialized.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    // Memoized serialized size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) obj;
      
      // With no declared fields, equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      // Hash mixes the descriptor identity with the unknown fields,
      // consistent with equals() above.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    // ---- Static parse helpers (pre-2.5 generated style): each builds a
    // ---- fresh Builder, merges the input, and validates via buildParsed().
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null when the stream is already at EOF
    // (mergeDelimitedFrom reports false).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code TransitionToActiveResponseProto}. Since the message
     * has no fields, the builder only manages the unknown-field set.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToActiveResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No sub-builders to force-initialize: the message has no fields.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but reports missing required fields as
      // InvalidProtocolBufferException for use from the parse helpers.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message type.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        // No required fields, so a builder is always initialized.
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        // Every tag is unknown to this field-less message; accumulate them
        // until end of stream (tag 0) or an end-group tag (parseUnknownField
        // returns false).
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:TransitionToActiveResponseProto)
    }
    
    // Create the shared default instance eagerly at class-load time.
    static {
      defaultInstance = new TransitionToActiveResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToActiveResponseProto)
  }
1282      
  /**
   * Common read-only view shared by {@code TransitionToStandbyRequestProto}
   * and its {@code Builder}. The message declares no fields, so this
   * interface adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface TransitionToStandbyRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
1286      public static final class TransitionToStandbyRequestProto extends
1287          com.google.protobuf.GeneratedMessage
1288          implements TransitionToStandbyRequestProtoOrBuilder {
1289        // Use TransitionToStandbyRequestProto.newBuilder() to construct.
1290        private TransitionToStandbyRequestProto(Builder builder) {
1291          super(builder);
1292        }
1293        private TransitionToStandbyRequestProto(boolean noInit) {}
1294        
1295        private static final TransitionToStandbyRequestProto defaultInstance;
1296        public static TransitionToStandbyRequestProto getDefaultInstance() {
1297          return defaultInstance;
1298        }
1299        
1300        public TransitionToStandbyRequestProto getDefaultInstanceForType() {
1301          return defaultInstance;
1302        }
1303        
1304        public static final com.google.protobuf.Descriptors.Descriptor
1305            getDescriptor() {
1306          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_descriptor;
1307        }
1308        
1309        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1310            internalGetFieldAccessorTable() {
1311          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_fieldAccessorTable;
1312        }
1313        
1314        private void initFields() {
1315        }
1316        private byte memoizedIsInitialized = -1;
1317        public final boolean isInitialized() {
1318          byte isInitialized = memoizedIsInitialized;
1319          if (isInitialized != -1) return isInitialized == 1;
1320          
1321          memoizedIsInitialized = 1;
1322          return true;
1323        }
1324        
1325        public void writeTo(com.google.protobuf.CodedOutputStream output)
1326                            throws java.io.IOException {
1327          getSerializedSize();
1328          getUnknownFields().writeTo(output);
1329        }
1330        
1331        private int memoizedSerializedSize = -1;
1332        public int getSerializedSize() {
1333          int size = memoizedSerializedSize;
1334          if (size != -1) return size;
1335        
1336          size = 0;
1337          size += getUnknownFields().getSerializedSize();
1338          memoizedSerializedSize = size;
1339          return size;
1340        }
1341        
1342        private static final long serialVersionUID = 0L;
1343        @java.lang.Override
1344        protected java.lang.Object writeReplace()
1345            throws java.io.ObjectStreamException {
1346          return super.writeReplace();
1347        }
1348        
    // Two instances are equal iff the other object is the same message type
    // and the unknown field sets are equal (no declared fields to compare).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1364        
    // Mixes the descriptor hash with the unknown-field hash; consistent
    // with equals() above.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
1372        
    // Static parsing entry points. Each variant delegates to a fresh Builder
    // and buildParsed(), which converts an uninitialized result into
    // InvalidProtocolBufferException (never triggered here: no required fields).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Length-delimited variants return null when the stream is already at EOF
    // (mergeDelimitedFrom returns false).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
1439        
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new Builder pre-populated from the given prototype.
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    // Creates a builder attached to the given parent, so parent builders
    // are notified of changes via onChanged().
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Builder for the field-less TransitionToStandbyRequestProto; only
    // unknown fields can be accumulated.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message field builders exist to force-create for this
      // field-less message; the empty check mirrors generated-code convention.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
      }
      
      // Builds the message, throwing if uninitialized (cannot happen here:
      // no required fields).
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but reports an uninitialized result as
      // InvalidProtocolBufferException for the parseFrom() entry points.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto(this);
        onBuilt();
        return result;
      }
      
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      // Typed merge: only unknown fields can be carried over.
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      // Reads tags until end-of-message (tag 0) or an end-group tag; every
      // tag is preserved as an unknown field since this message declares none.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:TransitionToStandbyRequestProto)
    }
1574        
    // Eagerly create and initialize the singleton default instance.
    static {
      defaultInstance = new TransitionToStandbyRequestProto(true);
      defaultInstance.initFields();
    }
1579        
1580        // @@protoc_insertion_point(class_scope:TransitionToStandbyRequestProto)
1581      }
1582      
  // Marker accessor interface for TransitionToStandbyResponseProto; the
  // message declares no fields, so no accessors are generated.
  public interface TransitionToStandbyResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  // Field-less response message; serves as an empty acknowledgement.
  public static final class TransitionToStandbyResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements TransitionToStandbyResponseProtoOrBuilder {
    // Use TransitionToStandbyResponseProto.newBuilder() to construct.
    private TransitionToStandbyResponseProto(Builder builder) {
      super(builder);
    }
    // Used only to create the singleton default instance below.
    private TransitionToStandbyResponseProto(boolean noInit) {}
    
    // Singleton default (empty) instance, created in the static block below.
    private static final TransitionToStandbyResponseProto defaultInstance;
    public static TransitionToStandbyResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public TransitionToStandbyResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    // Descriptor and reflection accessors backed by the file-level tables.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_fieldAccessorTable;
    }
    
    // This message declares no fields, so there is nothing to initialize.
    private void initFields() {
    }
    // Cache for isInitialized(): -1 = not yet computed, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // Always true: a message with no required fields is always initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    // Serializes this message; only unknown fields can be present.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Serialized size is just the unknown fields; result is memoized.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    // Equal iff the other object is the same message type with an equal
    // unknown field set (no declared fields to compare).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    // Mixes descriptor and unknown-field hashes; consistent with equals().
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    // Static parsing entry points; each delegates to a Builder and
    // buildParsed().
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Length-delimited variants return null when the stream is already at EOF.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    // Creates a builder attached to the given parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Builder for the field-less TransitionToStandbyResponseProto; only
    // unknown fields can be accumulated.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_TransitionToStandbyResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message field builders exist to force-create for this
      // field-less message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
      }
      
      // Builds the message, throwing if uninitialized (cannot happen here).
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but reports an uninitialized result as
      // InvalidProtocolBufferException for the parseFrom() entry points.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto(this);
        onBuilt();
        return result;
      }
      
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      // Typed merge: only unknown fields can be carried over.
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      // Reads tags until end-of-message (tag 0) or an end-group tag; every
      // tag is preserved as an unknown field since this message declares none.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:TransitionToStandbyResponseProto)
    }
    
    // Eagerly create and initialize the singleton default instance.
    static {
      defaultInstance = new TransitionToStandbyResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:TransitionToStandbyResponseProto)
  }
1882      
  // Marker accessor interface for GetServiceStatusRequestProto; the
  // message declares no fields, so no accessors are generated.
  public interface GetServiceStatusRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
1886      public static final class GetServiceStatusRequestProto extends
1887          com.google.protobuf.GeneratedMessage
1888          implements GetServiceStatusRequestProtoOrBuilder {
    // Use GetServiceStatusRequestProto.newBuilder() to construct.
    private GetServiceStatusRequestProto(Builder builder) {
      super(builder);
    }
    // Used only to create the singleton default instance below.
    private GetServiceStatusRequestProto(boolean noInit) {}
    
    // Singleton default (empty) instance, created in the class's static block.
    private static final GetServiceStatusRequestProto defaultInstance;
    public static GetServiceStatusRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public GetServiceStatusRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
1903        
    // Descriptor and reflection accessors backed by the file-level tables.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_fieldAccessorTable;
    }
1913        
    // This message declares no fields, so there is nothing to initialize.
    private void initFields() {
    }
    // Cache for isInitialized(): -1 = not yet computed, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // Always true: a message with no required fields is always initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
1924        
    // Serializes this message; only unknown fields can be present since no
    // fields are declared.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Serialized size is just the unknown fields; result is memoized.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1941        
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1948        
    // Equal iff the other object is the same message type with an equal
    // unknown field set (no declared fields to compare).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    // Mixes descriptor and unknown-field hashes; consistent with equals().
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
1972        
    // Static parsing entry points. Each variant delegates to a Builder and
    // buildParsed(), which converts an uninitialized result into
    // InvalidProtocolBufferException (never triggered here: no required fields).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Length-delimited variants return null when the stream is already at EOF
    // (mergeDelimitedFrom returns false).
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
2039        
    // ---- Builder factory surface ----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // New builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    // Hook used by an enclosing parent builder to create a nested builder
    // wired for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Builder for GetServiceStatusRequestProto. The message declares no
    // fields, so this builder only manages unknown fields via the machinery
    // inherited from GeneratedMessage.Builder.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Effectively a no-op here: there are no sub-message fields whose
      // nested builders would need eager initialization.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
      }
      
      // Throws UninitializedMessageException if the result is not initialized.
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but surfaces an uninitialized result as an
      // InvalidProtocolBufferException — used by the parseFrom factories.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto(this);
        onBuilt();
        return result;
      }
      
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      // With no declared fields, merging only carries over unknown fields.
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      // No required fields, so always initialized.
      public final boolean isInitialized() {
        return true;
      }
      
      // Streaming merge: reads tags until end of input (tag 0) or an
      // end-group tag (parseUnknownField returns false); every tag goes to
      // the unknown-field set since this message declares no fields.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:GetServiceStatusRequestProto)
    }
2174        
    // Eagerly create the shared default (empty) instance and apply field
    // defaults; the boolean-arg constructor bypasses the builder path.
    static {
      defaultInstance = new GetServiceStatusRequestProto(true);
      defaultInstance.initFields();
    }
2179        
2180        // @@protoc_insertion_point(class_scope:GetServiceStatusRequestProto)
2181      }
2182      
  // Read-only accessor view implemented by both GetServiceStatusResponseProto
  // and its Builder; hasX() reports field presence, getX() the value.
  public interface GetServiceStatusResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .HAServiceStateProto state = 1;
    boolean hasState();
    org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState();
    
    // optional bool readyToBecomeActive = 2;
    boolean hasReadyToBecomeActive();
    boolean getReadyToBecomeActive();
    
    // optional string notReadyReason = 3;
    boolean hasNotReadyReason();
    String getNotReadyReason();
  }
2198      public static final class GetServiceStatusResponseProto extends
2199          com.google.protobuf.GeneratedMessage
2200          implements GetServiceStatusResponseProtoOrBuilder {
    // Use GetServiceStatusResponseProto.newBuilder() to construct.
    private GetServiceStatusResponseProto(Builder builder) {
      super(builder);
    }
    // Only used to create defaultInstance without going through a builder.
    private GetServiceStatusResponseProto(boolean noInit) {}
    
    // Shared immutable default instance, assigned by the static initializer.
    private static final GetServiceStatusResponseProto defaultInstance;
    public static GetServiceStatusResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public GetServiceStatusResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_fieldAccessorTable;
    }
2225        
    // Presence bitmap: bit 0 = state, bit 1 = readyToBecomeActive,
    // bit 2 = notReadyReason.
    private int bitField0_;
    // required .HAServiceStateProto state = 1;
    public static final int STATE_FIELD_NUMBER = 1;
    private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_;
    public boolean hasState() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
      return state_;
    }
    
    // optional bool readyToBecomeActive = 2;
    public static final int READYTOBECOMEACTIVE_FIELD_NUMBER = 2;
    private boolean readyToBecomeActive_;
    public boolean hasReadyToBecomeActive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public boolean getReadyToBecomeActive() {
      return readyToBecomeActive_;
    }
    
    // optional string notReadyReason = 3;
    public static final int NOTREADYREASON_FIELD_NUMBER = 3;
    // Holds either a String or a ByteString; converted lazily in each
    // direction by the two accessors below.
    private java.lang.Object notReadyReason_;
    public boolean hasNotReadyReason() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    // Lazily decodes the wire bytes to a String; the decoded form is cached
    // back into the field only when the bytes are valid UTF-8.
    public String getNotReadyReason() {
      java.lang.Object ref = notReadyReason_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          notReadyReason_ = s;
        }
        return s;
      }
    }
    // Inverse conversion for serialization; caches the ByteString form.
    private com.google.protobuf.ByteString getNotReadyReasonBytes() {
      java.lang.Object ref = notReadyReason_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        notReadyReason_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
2278        
    // Field defaults; applied to defaultInstance by the static initializer.
    private void initFields() {
      state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
      readyToBecomeActive_ = false;
      notReadyReason_ = "";
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      // The only required field is `state`.
      if (!hasState()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
2296        
    // Serializes present fields in field-number order, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Populates the memoized size before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, state_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, readyToBecomeActive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getNotReadyReasonBytes());
      }
      getUnknownFields().writeTo(output);
    }
    
    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, state_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, readyToBecomeActive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getNotReadyReasonBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2334        
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's replacement form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2341        
2342        @java.lang.Override
2343        public boolean equals(final java.lang.Object obj) {
2344          if (obj == this) {
2345           return true;
2346          }
2347          if (!(obj instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)) {
2348            return super.equals(obj);
2349          }
2350          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other = (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) obj;
2351          
2352          boolean result = true;
2353          result = result && (hasState() == other.hasState());
2354          if (hasState()) {
2355            result = result &&
2356                (getState() == other.getState());
2357          }
2358          result = result && (hasReadyToBecomeActive() == other.hasReadyToBecomeActive());
2359          if (hasReadyToBecomeActive()) {
2360            result = result && (getReadyToBecomeActive()
2361                == other.getReadyToBecomeActive());
2362          }
2363          result = result && (hasNotReadyReason() == other.hasNotReadyReason());
2364          if (hasNotReadyReason()) {
2365            result = result && getNotReadyReason()
2366                .equals(other.getNotReadyReason());
2367          }
2368          result = result &&
2369              getUnknownFields().equals(other.getUnknownFields());
2370          return result;
2371        }
2372        
    // Mixes field numbers and values of all present fields, mirroring the
    // fields compared in equals(); hashEnum/hashBoolean are inherited from
    // the protobuf runtime.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      if (hasReadyToBecomeActive()) {
        hash = (37 * hash) + READYTOBECOMEACTIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getReadyToBecomeActive());
      }
      if (hasNotReadyReason()) {
        hash = (37 * hash) + NOTREADYREASON_FIELD_NUMBER;
        hash = (53 * hash) + getNotReadyReason().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
2392        
    // ---- Static parse factories ----
    // Each overload decodes a GetServiceStatusResponseProto from one input
    // form via a fresh Builder; buildParsed() reports a missing required
    // `state` field as InvalidProtocolBufferException.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null (rather than throwing) when
    // mergeDelimitedFrom reports that no message was read from the stream.
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
2459        
    // ---- Builder factory surface ----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // New builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    // Hook used by an enclosing parent builder to create a nested builder
    // wired for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2473        public static final class Builder extends
2474            com.google.protobuf.GeneratedMessage.Builder<Builder>
2475           implements org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.internal_static_GetServiceStatusResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Effectively a no-op: none of this message's fields are sub-messages
      // requiring eager nested-builder initialization.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
2502          
      // Resets every field to its proto default and clears all presence bits.
      public Builder clear() {
        super.clear();
        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
        bitField0_ = (bitField0_ & ~0x00000001);
        readyToBecomeActive_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        notReadyReason_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
      }
2526          
      // Throws UninitializedMessageException if required `state` is unset.
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto build() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but surfaces an uninitialized result as an
      // InvalidProtocolBufferException — used by the parseFrom factories.
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      // Copies all field values into a new message and translates the
      // builder's presence bits into the message's bitField0_.
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto buildPartial() {
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto result = new org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.state_ = state_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.readyToBecomeActive_ = readyToBecomeActive_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.notReadyReason_ = notReadyReason_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
2565          
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) {
          return mergeFrom((org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      // Field-wise merge: only fields present in `other` overwrite this
      // builder; unknown fields are merged as well.
      public Builder mergeFrom(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto other) {
        if (other == org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()) return this;
        if (other.hasState()) {
          setState(other.getState());
        }
        if (other.hasReadyToBecomeActive()) {
          setReadyToBecomeActive(other.getReadyToBecomeActive());
        }
        if (other.hasNotReadyReason()) {
          setNotReadyReason(other.getNotReadyReason());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      // Initialized iff the required `state` field has been set.
      public final boolean isInitialized() {
        if (!hasState()) {
          
          return false;
        }
        return true;
      }
2597          
      // Streaming merge: reads tag/value pairs until end of input (tag 0) or
      // an end-group tag (parseUnknownField returns false). Note the
      // `default` arm lexically precedes the field cases; Java switch
      // dispatch is by value, not position, so this is equivalent to listing
      // it last.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {  // state: field 1, varint (enum)
              int rawValue = input.readEnum();
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: preserve it as an unknown varint.
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                state_ = value;
              }
              break;
            }
            case 16: {  // readyToBecomeActive: field 2, varint (bool)
              bitField0_ |= 0x00000002;
              readyToBecomeActive_ = input.readBool();
              break;
            }
            case 26: {  // notReadyReason: field 3, length-delimited (string)
              bitField0_ |= 0x00000004;
              notReadyReason_ = input.readBytes();
              break;
            }
          }
        }
      }
2645          
      // Builder-side presence bitmap (same bit layout as the message's).
      private int bitField0_;
      
      // required .HAServiceStateProto state = 1;
      private org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
      public boolean hasState() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto getState() {
        return state_;
      }
      public Builder setState(org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        state_ = value;
        onChanged();
        return this;
      }
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000001);
        state_ = org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto.INITIALIZING;
        onChanged();
        return this;
      }
      
      // optional bool readyToBecomeActive = 2;
      private boolean readyToBecomeActive_ ;
      public boolean hasReadyToBecomeActive() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public boolean getReadyToBecomeActive() {
        return readyToBecomeActive_;
      }
      public Builder setReadyToBecomeActive(boolean value) {
        bitField0_ |= 0x00000002;
        readyToBecomeActive_ = value;
        onChanged();
        return this;
      }
      public Builder clearReadyToBecomeActive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        readyToBecomeActive_ = false;
        onChanged();
        return this;
      }
      
      // optional string notReadyReason = 3;
      // Holds either a String or a ByteString (set from the wire as bytes).
      private java.lang.Object notReadyReason_ = "";
      public boolean hasNotReadyReason() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      // Builder-side lazy decode: unlike the message's getter, the decoded
      // String is cached unconditionally (no UTF-8 validity check here).
      public String getNotReadyReason() {
        java.lang.Object ref = notReadyReason_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          notReadyReason_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
2708          public Builder setNotReadyReason(String value) {
2709            if (value == null) {
2710        throw new NullPointerException();
2711      }
2712      bitField0_ |= 0x00000004;
2713            notReadyReason_ = value;
2714            onChanged();
2715            return this;
2716          }
2717          public Builder clearNotReadyReason() {
2718            bitField0_ = (bitField0_ & ~0x00000004);
2719            notReadyReason_ = getDefaultInstance().getNotReadyReason();
2720            onChanged();
2721            return this;
2722          }
2723          void setNotReadyReason(com.google.protobuf.ByteString value) {
2724            bitField0_ |= 0x00000004;
2725            notReadyReason_ = value;
2726            onChanged();
2727          }
2728          
2729          // @@protoc_insertion_point(builder_scope:GetServiceStatusResponseProto)
2730        }
2731        
    // Eagerly build the shared singleton default instance for this message
    // type; initFields() resets all fields to their proto defaults.
    static {
      defaultInstance = new GetServiceStatusResponseProto(true);
      defaultInstance.initFields();
    }
2736        
2737        // @@protoc_insertion_point(class_scope:GetServiceStatusResponseProto)
2738      }
2739      
  /**
   * Generated service class for the {@code HAServiceProtocolService} RPC
   * service declared in HAServiceProtocol.proto. Exposes the four HA
   * operations (monitorHealth, transitionToActive, transitionToStandby,
   * getServiceStatus) in both callback (async) and blocking flavors, plus
   * reflective dispatch keyed on each method's descriptor index (0..3).
   */
  public static abstract class HAServiceProtocolService
      implements com.google.protobuf.Service {
    protected HAServiceProtocolService() {}
    
    // Callback-style server interface; adapt an implementation into a
    // protobuf Service via newReflectiveService(impl).
    public interface Interface {
      public abstract void monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
      
      public abstract void transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
      
      public abstract void transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
      
      public abstract void getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
      
    }
    
    // Wraps a callback-style Interface implementation as a full
    // HAServiceProtocolService; each override simply delegates to impl.
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new HAServiceProtocolService() {
        @java.lang.Override
        public  void monitorHealth(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
          impl.monitorHealth(controller, request, done);
        }
        
        @java.lang.Override
        public  void transitionToActive(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
          impl.transitionToActive(controller, request, done);
        }
        
        @java.lang.Override
        public  void transitionToStandby(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
          impl.transitionToStandby(controller, request, done);
        }
        
        @java.lang.Override
        public  void getServiceStatus(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
          impl.getServiceStatus(controller, request, done);
        }
        
      };
    }
    
    // Wraps a BlockingInterface implementation as a BlockingService.
    // Dispatch is by the method descriptor's index (0..3), which must match
    // the declaration order of the rpcs in the .proto file.
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }
        
        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          // Guard against a descriptor that belongs to a different service.
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request);
            case 1:
              return impl.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request);
            case 2:
              return impl.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request);
            case 3:
              return impl.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }
        
        // Default (empty) request message for the given method, used by the
        // RPC layer to parse incoming request bytes.
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }
        
        // Default (empty) response message for the given method.
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }
        
      };
    }
    
    // Abstract async operations a concrete service subclass must implement.
    public abstract void monitorHealth(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done);
    
    public abstract void transitionToActive(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done);
    
    public abstract void transitionToStandby(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done);
    
    public abstract void getServiceStatus(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done);
    
    // Descriptor for this service: the first (index 0) service declared in
    // the HAServiceProtocol.proto file descriptor.
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    
    // Reflective dispatch for the async API: the method descriptor's index
    // selects among the four abstract operations, and the generic callback is
    // specialized to the concrete response type before delegation.
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.monitorHealth(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.transitionToActive(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.transitionToStandby(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto>specializeCallback(
              done));
          return;
        case 3:
          this.getServiceStatus(controller, (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
    
    // Default (empty) request message for the given method, indexed 0..3.
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
    
    // Default (empty) response message for the given method, indexed 0..3.
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
    
    // Creates a client-side async stub over the given RpcChannel.
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
    
    // Client-side async stub: forwards each call to the channel with the
    // matching method descriptor (index 0..3) and generalizes the callback.
    public static final class Stub extends org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceProtocolService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
      
      private final com.google.protobuf.RpcChannel channel;
      
      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }
      
      public  void monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance()));
      }
      
      public  void transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance()));
      }
      
      public  void transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance()));
      }
      
      public  void getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
            org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance()));
      }
    }
    
    // Creates a client-side blocking stub over the given BlockingRpcChannel.
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
    
    // Synchronous client interface: each call blocks until the response
    // arrives or fails with a ServiceException.
    public interface BlockingInterface {
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
          throws com.google.protobuf.ServiceException;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
          throws com.google.protobuf.ServiceException;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
          throws com.google.protobuf.ServiceException;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
          throws com.google.protobuf.ServiceException;
    }
    
    // Blocking stub: forwards each call to channel.callBlockingMethod with
    // the matching method descriptor and casts the returned Message.
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }
      
      private final com.google.protobuf.BlockingRpcChannel channel;
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto monitorHealth(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.getDefaultInstance());
      }
      
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto transitionToActive(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.getDefaultInstance());
      }
      
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto transitionToStandby(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.getDefaultInstance());
      }
      
      
      public org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto getServiceStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.getDefaultInstance());
      }
      
    }
  }
3155      
  // Per-message descriptors and reflective field-accessor tables for every
  // message type in HAServiceProtocol.proto. These start null and are
  // assigned when the file descriptor is built in this class's static
  // initializer.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MonitorHealthRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MonitorHealthRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MonitorHealthResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MonitorHealthResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToActiveRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToActiveRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToActiveResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToActiveResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToStandbyRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToStandbyRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TransitionToStandbyResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TransitionToStandbyResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetServiceStatusRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetServiceStatusRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetServiceStatusResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetServiceStatusResponseProto_fieldAccessorTable;
  
  // Returns the FileDescriptor for HAServiceProtocol.proto, built once by
  // the static initializer from the embedded serialized descriptor data.
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for HAServiceProtocol.proto, embedded as
    // escaped string chunks (protoc splits it to stay under Java's 64KB string
    // constant limit). These bytes are wire-format descriptor data: do not
    // modify them by hand — any change corrupts the descriptor.
    java.lang.String[] descriptorData = {
      "\n\027HAServiceProtocol.proto\"\033\n\031MonitorHeal" +
      "thRequestProto\"\034\n\032MonitorHealthResponseP" +
      "roto\" \n\036TransitionToActiveRequestProto\"!" +
      "\n\037TransitionToActiveResponseProto\"!\n\037Tra" +
      "nsitionToStandbyRequestProto\"\"\n Transiti" +
      "onToStandbyResponseProto\"\036\n\034GetServiceSt" +
      "atusRequestProto\"y\n\035GetServiceStatusResp" +
      "onseProto\022#\n\005state\030\001 \002(\0162\024.HAServiceStat" +
      "eProto\022\033\n\023readyToBecomeActive\030\002 \001(\010\022\026\n\016n" +
      "otReadyReason\030\003 \001(\t*@\n\023HAServiceStatePro",
      "to\022\020\n\014INITIALIZING\020\000\022\n\n\006ACTIVE\020\001\022\013\n\007STAN" +
      "DBY\020\0022\354\002\n\030HAServiceProtocolService\022H\n\rmo" +
      "nitorHealth\022\032.MonitorHealthRequestProto\032" +
      "\033.MonitorHealthResponseProto\022W\n\022transiti" +
      "onToActive\022\037.TransitionToActiveRequestPr" +
      "oto\032 .TransitionToActiveResponseProto\022Z\n" +
      "\023transitionToStandby\022 .TransitionToStand" +
      "byRequestProto\032!.TransitionToStandbyResp" +
      "onseProto\022Q\n\020getServiceStatus\022\035.GetServi" +
      "ceStatusRequestProto\032\036.GetServiceStatusR",
      "esponseProtoB;\n\032org.apache.hadoop.ha.pro" +
      "toB\027HAServiceProtocolProtos\210\001\001\240\001\001"
    };
    // Callback invoked by the protobuf runtime after the file descriptor has
    // been built; it caches the file descriptor and wires up the per-message
    // Descriptor and FieldAccessorTable statics. The getMessageTypes() indices
    // (0..7) follow the message declaration order in HAServiceProtocol.proto.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_MonitorHealthRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_MonitorHealthRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MonitorHealthRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestProto.Builder.class);
          internal_static_MonitorHealthResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_MonitorHealthResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MonitorHealthResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthResponseProto.Builder.class);
          internal_static_TransitionToActiveRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_TransitionToActiveRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToActiveRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto.Builder.class);
          internal_static_TransitionToActiveResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_TransitionToActiveResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToActiveResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveResponseProto.Builder.class);
          internal_static_TransitionToStandbyRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_TransitionToStandbyRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToStandbyRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto.Builder.class);
          internal_static_TransitionToStandbyResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_TransitionToStandbyResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TransitionToStandbyResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyResponseProto.Builder.class);
          internal_static_GetServiceStatusRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_GetServiceStatusRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_GetServiceStatusRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusRequestProto.Builder.class);
          internal_static_GetServiceStatusResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_GetServiceStatusResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_GetServiceStatusResponseProto_descriptor,
              new java.lang.String[] { "State", "ReadyToBecomeActive", "NotReadyReason", },
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.class,
              org.apache.hadoop.ha.proto.HAServiceProtocolProtos.GetServiceStatusResponseProto.Builder.class);
          // No extensions are registered for this file, so no registry is
          // returned to the runtime.
          return null;
        }
      };
    // Parse the embedded descriptor data and invoke the assigner above. The
    // empty dependency array reflects that HAServiceProtocol.proto imports no
    // other .proto files.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
3305      
3306      // @@protoc_insertion_point(outer_class_scope)
3307    }