001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: RpcHeader.proto
003
004package org.apache.hadoop.ipc.protobuf;
005
006public final class RpcHeaderProtos {
007  private RpcHeaderProtos() {}
008  public static void registerAllExtensions(
009      com.google.protobuf.ExtensionRegistry registry) {
010  }
011  /**
012   * Protobuf enum {@code hadoop.common.RpcKindProto}
013   *
014   * <pre>
015   **
   * RpcKind determines the rpcEngine and the serialization of the rpc request
017   * </pre>
018   */
019  public enum RpcKindProto
020      implements com.google.protobuf.ProtocolMessageEnum {
021    /**
022     * <code>RPC_BUILTIN = 0;</code>
023     *
024     * <pre>
     * Used for built-in calls by tests
026     * </pre>
027     */
028    RPC_BUILTIN(0, 0),
029    /**
030     * <code>RPC_WRITABLE = 1;</code>
031     *
032     * <pre>
033     * Use WritableRpcEngine 
034     * </pre>
035     */
036    RPC_WRITABLE(1, 1),
037    /**
038     * <code>RPC_PROTOCOL_BUFFER = 2;</code>
039     *
040     * <pre>
041     * Use ProtobufRpcEngine
042     * </pre>
043     */
044    RPC_PROTOCOL_BUFFER(2, 2),
045    ;
046
047    /**
048     * <code>RPC_BUILTIN = 0;</code>
049     *
050     * <pre>
     * Used for built-in calls by tests
052     * </pre>
053     */
054    public static final int RPC_BUILTIN_VALUE = 0;
055    /**
056     * <code>RPC_WRITABLE = 1;</code>
057     *
058     * <pre>
059     * Use WritableRpcEngine 
060     * </pre>
061     */
062    public static final int RPC_WRITABLE_VALUE = 1;
063    /**
064     * <code>RPC_PROTOCOL_BUFFER = 2;</code>
065     *
066     * <pre>
067     * Use ProtobufRpcEngine
068     * </pre>
069     */
070    public static final int RPC_PROTOCOL_BUFFER_VALUE = 2;
071
072
073    public final int getNumber() { return value; }
074
075    public static RpcKindProto valueOf(int value) {
076      switch (value) {
077        case 0: return RPC_BUILTIN;
078        case 1: return RPC_WRITABLE;
079        case 2: return RPC_PROTOCOL_BUFFER;
080        default: return null;
081      }
082    }
083
084    public static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
085        internalGetValueMap() {
086      return internalValueMap;
087    }
088    private static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
089        internalValueMap =
090          new com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>() {
091            public RpcKindProto findValueByNumber(int number) {
092              return RpcKindProto.valueOf(number);
093            }
094          };
095
096    public final com.google.protobuf.Descriptors.EnumValueDescriptor
097        getValueDescriptor() {
098      return getDescriptor().getValues().get(index);
099    }
100    public final com.google.protobuf.Descriptors.EnumDescriptor
101        getDescriptorForType() {
102      return getDescriptor();
103    }
104    public static final com.google.protobuf.Descriptors.EnumDescriptor
105        getDescriptor() {
106      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.getDescriptor().getEnumTypes().get(0);
107    }
108
109    private static final RpcKindProto[] VALUES = values();
110
111    public static RpcKindProto valueOf(
112        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
113      if (desc.getType() != getDescriptor()) {
114        throw new java.lang.IllegalArgumentException(
115          "EnumValueDescriptor is not for this type.");
116      }
117      return VALUES[desc.getIndex()];
118    }
119
120    private final int index;
121    private final int value;
122
123    private RpcKindProto(int index, int value) {
124      this.index = index;
125      this.value = value;
126    }
127
128    // @@protoc_insertion_point(enum_scope:hadoop.common.RpcKindProto)
129  }
130
131  public interface RpcRequestHeaderProtoOrBuilder
132      extends com.google.protobuf.MessageOrBuilder {
133
134    // optional .hadoop.common.RpcKindProto rpcKind = 1;
135    /**
136     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
137     */
138    boolean hasRpcKind();
139    /**
140     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
141     */
142    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind();
143
144    // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
145    /**
146     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
147     */
148    boolean hasRpcOp();
149    /**
150     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
151     */
152    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp();
153
154    // required sint32 callId = 3;
155    /**
156     * <code>required sint32 callId = 3;</code>
157     *
158     * <pre>
159     * a sequence number that is sent back in response
160     * </pre>
161     */
162    boolean hasCallId();
163    /**
164     * <code>required sint32 callId = 3;</code>
165     *
166     * <pre>
167     * a sequence number that is sent back in response
168     * </pre>
169     */
170    int getCallId();
171
172    // required bytes clientId = 4;
173    /**
174     * <code>required bytes clientId = 4;</code>
175     *
176     * <pre>
177     * Globally unique client ID
178     * </pre>
179     */
180    boolean hasClientId();
181    /**
182     * <code>required bytes clientId = 4;</code>
183     *
184     * <pre>
185     * Globally unique client ID
186     * </pre>
187     */
188    com.google.protobuf.ByteString getClientId();
189
190    // optional sint32 retryCount = 5 [default = -1];
191    /**
192     * <code>optional sint32 retryCount = 5 [default = -1];</code>
193     *
194     * <pre>
195     * clientId + callId uniquely identifies a request
196     * retry count, 1 means this is the first retry
197     * </pre>
198     */
199    boolean hasRetryCount();
200    /**
201     * <code>optional sint32 retryCount = 5 [default = -1];</code>
202     *
203     * <pre>
204     * clientId + callId uniquely identifies a request
205     * retry count, 1 means this is the first retry
206     * </pre>
207     */
208    int getRetryCount();
209  }
210  /**
211   * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
212   *
213   * <pre>
   * the header for the RpcRequest (an illustrative serialization sketch follows this class)
215   * </pre>
216   */
217  public static final class RpcRequestHeaderProto extends
218      com.google.protobuf.GeneratedMessage
219      implements RpcRequestHeaderProtoOrBuilder {
220    // Use RpcRequestHeaderProto.newBuilder() to construct.
221    private RpcRequestHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
222      super(builder);
223      this.unknownFields = builder.getUnknownFields();
224    }
225    private RpcRequestHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
226
227    private static final RpcRequestHeaderProto defaultInstance;
228    public static RpcRequestHeaderProto getDefaultInstance() {
229      return defaultInstance;
230    }
231
232    public RpcRequestHeaderProto getDefaultInstanceForType() {
233      return defaultInstance;
234    }
235
236    private final com.google.protobuf.UnknownFieldSet unknownFields;
237    @java.lang.Override
238    public final com.google.protobuf.UnknownFieldSet
239        getUnknownFields() {
240      return this.unknownFields;
241    }
242    private RpcRequestHeaderProto(
243        com.google.protobuf.CodedInputStream input,
244        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
245        throws com.google.protobuf.InvalidProtocolBufferException {
246      initFields();
247      int mutable_bitField0_ = 0;
248      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
249          com.google.protobuf.UnknownFieldSet.newBuilder();
250      try {
251        boolean done = false;
252        while (!done) {
253          int tag = input.readTag();
254          switch (tag) {
255            case 0:
256              done = true;
257              break;
258            default: {
259              if (!parseUnknownField(input, unknownFields,
260                                     extensionRegistry, tag)) {
261                done = true;
262              }
263              break;
264            }
265            case 8: {
266              int rawValue = input.readEnum();
267              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.valueOf(rawValue);
268              if (value == null) {
269                unknownFields.mergeVarintField(1, rawValue);
270              } else {
271                bitField0_ |= 0x00000001;
272                rpcKind_ = value;
273              }
274              break;
275            }
276            case 16: {
277              int rawValue = input.readEnum();
278              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.valueOf(rawValue);
279              if (value == null) {
280                unknownFields.mergeVarintField(2, rawValue);
281              } else {
282                bitField0_ |= 0x00000002;
283                rpcOp_ = value;
284              }
285              break;
286            }
287            case 24: {
288              bitField0_ |= 0x00000004;
289              callId_ = input.readSInt32();
290              break;
291            }
292            case 34: {
293              bitField0_ |= 0x00000008;
294              clientId_ = input.readBytes();
295              break;
296            }
297            case 40: {
298              bitField0_ |= 0x00000010;
299              retryCount_ = input.readSInt32();
300              break;
301            }
302          }
303        }
304      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
305        throw e.setUnfinishedMessage(this);
306      } catch (java.io.IOException e) {
307        throw new com.google.protobuf.InvalidProtocolBufferException(
308            e.getMessage()).setUnfinishedMessage(this);
309      } finally {
310        this.unknownFields = unknownFields.build();
311        makeExtensionsImmutable();
312      }
313    }
314    public static final com.google.protobuf.Descriptors.Descriptor
315        getDescriptor() {
316      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
317    }
318
319    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
320        internalGetFieldAccessorTable() {
321      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
322          .ensureFieldAccessorsInitialized(
323              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
324    }
325
326    public static com.google.protobuf.Parser<RpcRequestHeaderProto> PARSER =
327        new com.google.protobuf.AbstractParser<RpcRequestHeaderProto>() {
328      public RpcRequestHeaderProto parsePartialFrom(
329          com.google.protobuf.CodedInputStream input,
330          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
331          throws com.google.protobuf.InvalidProtocolBufferException {
332        return new RpcRequestHeaderProto(input, extensionRegistry);
333      }
334    };
335
336    @java.lang.Override
337    public com.google.protobuf.Parser<RpcRequestHeaderProto> getParserForType() {
338      return PARSER;
339    }
340
341    /**
342     * Protobuf enum {@code hadoop.common.RpcRequestHeaderProto.OperationProto}
343     */
344    public enum OperationProto
345        implements com.google.protobuf.ProtocolMessageEnum {
346      /**
347       * <code>RPC_FINAL_PACKET = 0;</code>
348       *
349       * <pre>
350       * The final RPC Packet
351       * </pre>
352       */
353      RPC_FINAL_PACKET(0, 0),
354      /**
355       * <code>RPC_CONTINUATION_PACKET = 1;</code>
356       *
357       * <pre>
358       * not implemented yet
359       * </pre>
360       */
361      RPC_CONTINUATION_PACKET(1, 1),
362      /**
363       * <code>RPC_CLOSE_CONNECTION = 2;</code>
364       *
365       * <pre>
366       * close the rpc connection
367       * </pre>
368       */
369      RPC_CLOSE_CONNECTION(2, 2),
370      ;
371
372      /**
373       * <code>RPC_FINAL_PACKET = 0;</code>
374       *
375       * <pre>
376       * The final RPC Packet
377       * </pre>
378       */
379      public static final int RPC_FINAL_PACKET_VALUE = 0;
380      /**
381       * <code>RPC_CONTINUATION_PACKET = 1;</code>
382       *
383       * <pre>
384       * not implemented yet
385       * </pre>
386       */
387      public static final int RPC_CONTINUATION_PACKET_VALUE = 1;
388      /**
389       * <code>RPC_CLOSE_CONNECTION = 2;</code>
390       *
391       * <pre>
392       * close the rpc connection
393       * </pre>
394       */
395      public static final int RPC_CLOSE_CONNECTION_VALUE = 2;
396
397
398      public final int getNumber() { return value; }
399
400      public static OperationProto valueOf(int value) {
401        switch (value) {
402          case 0: return RPC_FINAL_PACKET;
403          case 1: return RPC_CONTINUATION_PACKET;
404          case 2: return RPC_CLOSE_CONNECTION;
405          default: return null;
406        }
407      }
408
409      public static com.google.protobuf.Internal.EnumLiteMap<OperationProto>
410          internalGetValueMap() {
411        return internalValueMap;
412      }
413      private static com.google.protobuf.Internal.EnumLiteMap<OperationProto>
414          internalValueMap =
415            new com.google.protobuf.Internal.EnumLiteMap<OperationProto>() {
416              public OperationProto findValueByNumber(int number) {
417                return OperationProto.valueOf(number);
418              }
419            };
420
421      public final com.google.protobuf.Descriptors.EnumValueDescriptor
422          getValueDescriptor() {
423        return getDescriptor().getValues().get(index);
424      }
425      public final com.google.protobuf.Descriptors.EnumDescriptor
426          getDescriptorForType() {
427        return getDescriptor();
428      }
429      public static final com.google.protobuf.Descriptors.EnumDescriptor
430          getDescriptor() {
431        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDescriptor().getEnumTypes().get(0);
432      }
433
434      private static final OperationProto[] VALUES = values();
435
436      public static OperationProto valueOf(
437          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
438        if (desc.getType() != getDescriptor()) {
439          throw new java.lang.IllegalArgumentException(
440            "EnumValueDescriptor is not for this type.");
441        }
442        return VALUES[desc.getIndex()];
443      }
444
445      private final int index;
446      private final int value;
447
448      private OperationProto(int index, int value) {
449        this.index = index;
450        this.value = value;
451      }
452
453      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcRequestHeaderProto.OperationProto)
454    }
455
456    private int bitField0_;
457    // optional .hadoop.common.RpcKindProto rpcKind = 1;
458    public static final int RPCKIND_FIELD_NUMBER = 1;
459    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_;
460    /**
461     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
462     */
463    public boolean hasRpcKind() {
464      return ((bitField0_ & 0x00000001) == 0x00000001);
465    }
466    /**
467     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
468     */
469    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
470      return rpcKind_;
471    }
472
473    // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
474    public static final int RPCOP_FIELD_NUMBER = 2;
475    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_;
476    /**
477     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
478     */
479    public boolean hasRpcOp() {
480      return ((bitField0_ & 0x00000002) == 0x00000002);
481    }
482    /**
483     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
484     */
485    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
486      return rpcOp_;
487    }
488
489    // required sint32 callId = 3;
490    public static final int CALLID_FIELD_NUMBER = 3;
491    private int callId_;
492    /**
493     * <code>required sint32 callId = 3;</code>
494     *
495     * <pre>
496     * a sequence number that is sent back in response
497     * </pre>
498     */
499    public boolean hasCallId() {
500      return ((bitField0_ & 0x00000004) == 0x00000004);
501    }
502    /**
503     * <code>required sint32 callId = 3;</code>
504     *
505     * <pre>
506     * a sequence number that is sent back in response
507     * </pre>
508     */
509    public int getCallId() {
510      return callId_;
511    }
512
513    // required bytes clientId = 4;
514    public static final int CLIENTID_FIELD_NUMBER = 4;
515    private com.google.protobuf.ByteString clientId_;
516    /**
517     * <code>required bytes clientId = 4;</code>
518     *
519     * <pre>
520     * Globally unique client ID
521     * </pre>
522     */
523    public boolean hasClientId() {
524      return ((bitField0_ & 0x00000008) == 0x00000008);
525    }
526    /**
527     * <code>required bytes clientId = 4;</code>
528     *
529     * <pre>
530     * Globally unique client ID
531     * </pre>
532     */
533    public com.google.protobuf.ByteString getClientId() {
534      return clientId_;
535    }
536
537    // optional sint32 retryCount = 5 [default = -1];
538    public static final int RETRYCOUNT_FIELD_NUMBER = 5;
539    private int retryCount_;
540    /**
541     * <code>optional sint32 retryCount = 5 [default = -1];</code>
542     *
543     * <pre>
544     * clientId + callId uniquely identifies a request
545     * retry count, 1 means this is the first retry
546     * </pre>
547     */
548    public boolean hasRetryCount() {
549      return ((bitField0_ & 0x00000010) == 0x00000010);
550    }
551    /**
552     * <code>optional sint32 retryCount = 5 [default = -1];</code>
553     *
554     * <pre>
555     * clientId + callId uniquely identifies a request
556     * retry count, 1 means this is the first retry
557     * </pre>
558     */
559    public int getRetryCount() {
560      return retryCount_;
561    }
562
563    private void initFields() {
564      rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
565      rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
566      callId_ = 0;
567      clientId_ = com.google.protobuf.ByteString.EMPTY;
568      retryCount_ = -1;
569    }
570    private byte memoizedIsInitialized = -1;
571    public final boolean isInitialized() {
572      byte isInitialized = memoizedIsInitialized;
573      if (isInitialized != -1) return isInitialized == 1;
574
575      if (!hasCallId()) {
576        memoizedIsInitialized = 0;
577        return false;
578      }
579      if (!hasClientId()) {
580        memoizedIsInitialized = 0;
581        return false;
582      }
583      memoizedIsInitialized = 1;
584      return true;
585    }
586
587    public void writeTo(com.google.protobuf.CodedOutputStream output)
588                        throws java.io.IOException {
589      getSerializedSize();
590      if (((bitField0_ & 0x00000001) == 0x00000001)) {
591        output.writeEnum(1, rpcKind_.getNumber());
592      }
593      if (((bitField0_ & 0x00000002) == 0x00000002)) {
594        output.writeEnum(2, rpcOp_.getNumber());
595      }
596      if (((bitField0_ & 0x00000004) == 0x00000004)) {
597        output.writeSInt32(3, callId_);
598      }
599      if (((bitField0_ & 0x00000008) == 0x00000008)) {
600        output.writeBytes(4, clientId_);
601      }
602      if (((bitField0_ & 0x00000010) == 0x00000010)) {
603        output.writeSInt32(5, retryCount_);
604      }
605      getUnknownFields().writeTo(output);
606    }
607
608    private int memoizedSerializedSize = -1;
609    public int getSerializedSize() {
610      int size = memoizedSerializedSize;
611      if (size != -1) return size;
612
613      size = 0;
614      if (((bitField0_ & 0x00000001) == 0x00000001)) {
615        size += com.google.protobuf.CodedOutputStream
616          .computeEnumSize(1, rpcKind_.getNumber());
617      }
618      if (((bitField0_ & 0x00000002) == 0x00000002)) {
619        size += com.google.protobuf.CodedOutputStream
620          .computeEnumSize(2, rpcOp_.getNumber());
621      }
622      if (((bitField0_ & 0x00000004) == 0x00000004)) {
623        size += com.google.protobuf.CodedOutputStream
624          .computeSInt32Size(3, callId_);
625      }
626      if (((bitField0_ & 0x00000008) == 0x00000008)) {
627        size += com.google.protobuf.CodedOutputStream
628          .computeBytesSize(4, clientId_);
629      }
630      if (((bitField0_ & 0x00000010) == 0x00000010)) {
631        size += com.google.protobuf.CodedOutputStream
632          .computeSInt32Size(5, retryCount_);
633      }
634      size += getUnknownFields().getSerializedSize();
635      memoizedSerializedSize = size;
636      return size;
637    }
638
639    private static final long serialVersionUID = 0L;
640    @java.lang.Override
641    protected java.lang.Object writeReplace()
642        throws java.io.ObjectStreamException {
643      return super.writeReplace();
644    }
645
646    @java.lang.Override
647    public boolean equals(final java.lang.Object obj) {
648      if (obj == this) {
649       return true;
650      }
651      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)) {
652        return super.equals(obj);
653      }
654      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) obj;
655
656      boolean result = true;
657      result = result && (hasRpcKind() == other.hasRpcKind());
658      if (hasRpcKind()) {
659        result = result &&
660            (getRpcKind() == other.getRpcKind());
661      }
662      result = result && (hasRpcOp() == other.hasRpcOp());
663      if (hasRpcOp()) {
664        result = result &&
665            (getRpcOp() == other.getRpcOp());
666      }
667      result = result && (hasCallId() == other.hasCallId());
668      if (hasCallId()) {
669        result = result && (getCallId()
670            == other.getCallId());
671      }
672      result = result && (hasClientId() == other.hasClientId());
673      if (hasClientId()) {
674        result = result && getClientId()
675            .equals(other.getClientId());
676      }
677      result = result && (hasRetryCount() == other.hasRetryCount());
678      if (hasRetryCount()) {
679        result = result && (getRetryCount()
680            == other.getRetryCount());
681      }
682      result = result &&
683          getUnknownFields().equals(other.getUnknownFields());
684      return result;
685    }
686
687    private int memoizedHashCode = 0;
688    @java.lang.Override
689    public int hashCode() {
690      if (memoizedHashCode != 0) {
691        return memoizedHashCode;
692      }
693      int hash = 41;
694      hash = (19 * hash) + getDescriptorForType().hashCode();
695      if (hasRpcKind()) {
696        hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
697        hash = (53 * hash) + hashEnum(getRpcKind());
698      }
699      if (hasRpcOp()) {
700        hash = (37 * hash) + RPCOP_FIELD_NUMBER;
701        hash = (53 * hash) + hashEnum(getRpcOp());
702      }
703      if (hasCallId()) {
704        hash = (37 * hash) + CALLID_FIELD_NUMBER;
705        hash = (53 * hash) + getCallId();
706      }
707      if (hasClientId()) {
708        hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
709        hash = (53 * hash) + getClientId().hashCode();
710      }
711      if (hasRetryCount()) {
712        hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
713        hash = (53 * hash) + getRetryCount();
714      }
715      hash = (29 * hash) + getUnknownFields().hashCode();
716      memoizedHashCode = hash;
717      return hash;
718    }
719
720    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
721        com.google.protobuf.ByteString data)
722        throws com.google.protobuf.InvalidProtocolBufferException {
723      return PARSER.parseFrom(data);
724    }
725    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
726        com.google.protobuf.ByteString data,
727        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
728        throws com.google.protobuf.InvalidProtocolBufferException {
729      return PARSER.parseFrom(data, extensionRegistry);
730    }
731    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(byte[] data)
732        throws com.google.protobuf.InvalidProtocolBufferException {
733      return PARSER.parseFrom(data);
734    }
735    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
736        byte[] data,
737        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
738        throws com.google.protobuf.InvalidProtocolBufferException {
739      return PARSER.parseFrom(data, extensionRegistry);
740    }
741    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(java.io.InputStream input)
742        throws java.io.IOException {
743      return PARSER.parseFrom(input);
744    }
745    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
746        java.io.InputStream input,
747        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
748        throws java.io.IOException {
749      return PARSER.parseFrom(input, extensionRegistry);
750    }
751    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(java.io.InputStream input)
752        throws java.io.IOException {
753      return PARSER.parseDelimitedFrom(input);
754    }
755    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(
756        java.io.InputStream input,
757        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
758        throws java.io.IOException {
759      return PARSER.parseDelimitedFrom(input, extensionRegistry);
760    }
761    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
762        com.google.protobuf.CodedInputStream input)
763        throws java.io.IOException {
764      return PARSER.parseFrom(input);
765    }
766    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
767        com.google.protobuf.CodedInputStream input,
768        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
769        throws java.io.IOException {
770      return PARSER.parseFrom(input, extensionRegistry);
771    }
772
773    public static Builder newBuilder() { return Builder.create(); }
774    public Builder newBuilderForType() { return newBuilder(); }
775    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto prototype) {
776      return newBuilder().mergeFrom(prototype);
777    }
778    public Builder toBuilder() { return newBuilder(this); }
779
780    @java.lang.Override
781    protected Builder newBuilderForType(
782        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
783      Builder builder = new Builder(parent);
784      return builder;
785    }
786    /**
787     * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
788     *
789     * <pre>
790     * the header for the RpcRequest
791     * </pre>
792     */
793    public static final class Builder extends
794        com.google.protobuf.GeneratedMessage.Builder<Builder>
795       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProtoOrBuilder {
796      public static final com.google.protobuf.Descriptors.Descriptor
797          getDescriptor() {
798        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
799      }
800
801      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
802          internalGetFieldAccessorTable() {
803        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
804            .ensureFieldAccessorsInitialized(
805                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
806      }
807
808      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.newBuilder()
809      private Builder() {
810        maybeForceBuilderInitialization();
811      }
812
813      private Builder(
814          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
815        super(parent);
816        maybeForceBuilderInitialization();
817      }
818      private void maybeForceBuilderInitialization() {
819        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
820        }
821      }
822      private static Builder create() {
823        return new Builder();
824      }
825
826      public Builder clear() {
827        super.clear();
828        rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
829        bitField0_ = (bitField0_ & ~0x00000001);
830        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
831        bitField0_ = (bitField0_ & ~0x00000002);
832        callId_ = 0;
833        bitField0_ = (bitField0_ & ~0x00000004);
834        clientId_ = com.google.protobuf.ByteString.EMPTY;
835        bitField0_ = (bitField0_ & ~0x00000008);
836        retryCount_ = -1;
837        bitField0_ = (bitField0_ & ~0x00000010);
838        return this;
839      }
840
841      public Builder clone() {
842        return create().mergeFrom(buildPartial());
843      }
844
845      public com.google.protobuf.Descriptors.Descriptor
846          getDescriptorForType() {
847        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
848      }
849
850      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstanceForType() {
851        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance();
852      }
853
854      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto build() {
855        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = buildPartial();
856        if (!result.isInitialized()) {
857          throw newUninitializedMessageException(result);
858        }
859        return result;
860      }
861
862      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto buildPartial() {
863        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto(this);
864        int from_bitField0_ = bitField0_;
865        int to_bitField0_ = 0;
866        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
867          to_bitField0_ |= 0x00000001;
868        }
869        result.rpcKind_ = rpcKind_;
870        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
871          to_bitField0_ |= 0x00000002;
872        }
873        result.rpcOp_ = rpcOp_;
874        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
875          to_bitField0_ |= 0x00000004;
876        }
877        result.callId_ = callId_;
878        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
879          to_bitField0_ |= 0x00000008;
880        }
881        result.clientId_ = clientId_;
882        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
883          to_bitField0_ |= 0x00000010;
884        }
885        result.retryCount_ = retryCount_;
886        result.bitField0_ = to_bitField0_;
887        onBuilt();
888        return result;
889      }
890
891      public Builder mergeFrom(com.google.protobuf.Message other) {
892        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) {
893          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)other);
894        } else {
895          super.mergeFrom(other);
896          return this;
897        }
898      }
899
900      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other) {
901        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance()) return this;
902        if (other.hasRpcKind()) {
903          setRpcKind(other.getRpcKind());
904        }
905        if (other.hasRpcOp()) {
906          setRpcOp(other.getRpcOp());
907        }
908        if (other.hasCallId()) {
909          setCallId(other.getCallId());
910        }
911        if (other.hasClientId()) {
912          setClientId(other.getClientId());
913        }
914        if (other.hasRetryCount()) {
915          setRetryCount(other.getRetryCount());
916        }
917        this.mergeUnknownFields(other.getUnknownFields());
918        return this;
919      }
920
921      public final boolean isInitialized() {
922        if (!hasCallId()) {
923          
924          return false;
925        }
926        if (!hasClientId()) {
927          
928          return false;
929        }
930        return true;
931      }
932
933      public Builder mergeFrom(
934          com.google.protobuf.CodedInputStream input,
935          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
936          throws java.io.IOException {
937        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parsedMessage = null;
938        try {
939          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
940        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
941          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) e.getUnfinishedMessage();
942          throw e;
943        } finally {
944          if (parsedMessage != null) {
945            mergeFrom(parsedMessage);
946          }
947        }
948        return this;
949      }
950      private int bitField0_;
951
952      // optional .hadoop.common.RpcKindProto rpcKind = 1;
953      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
954      /**
955       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
956       */
957      public boolean hasRpcKind() {
958        return ((bitField0_ & 0x00000001) == 0x00000001);
959      }
960      /**
961       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
962       */
963      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
964        return rpcKind_;
965      }
966      /**
967       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
968       */
969      public Builder setRpcKind(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value) {
970        if (value == null) {
971          throw new NullPointerException();
972        }
973        bitField0_ |= 0x00000001;
974        rpcKind_ = value;
975        onChanged();
976        return this;
977      }
978      /**
979       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
980       */
981      public Builder clearRpcKind() {
982        bitField0_ = (bitField0_ & ~0x00000001);
983        rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
984        onChanged();
985        return this;
986      }
987
988      // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
989      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
990      /**
991       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
992       */
993      public boolean hasRpcOp() {
994        return ((bitField0_ & 0x00000002) == 0x00000002);
995      }
996      /**
997       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
998       */
999      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
1000        return rpcOp_;
1001      }
1002      /**
1003       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1004       */
1005      public Builder setRpcOp(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value) {
1006        if (value == null) {
1007          throw new NullPointerException();
1008        }
1009        bitField0_ |= 0x00000002;
1010        rpcOp_ = value;
1011        onChanged();
1012        return this;
1013      }
1014      /**
1015       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
1016       */
1017      public Builder clearRpcOp() {
1018        bitField0_ = (bitField0_ & ~0x00000002);
1019        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
1020        onChanged();
1021        return this;
1022      }
1023
1024      // required sint32 callId = 3;
1025      private int callId_ ;
1026      /**
1027       * <code>required sint32 callId = 3;</code>
1028       *
1029       * <pre>
1030       * a sequence number that is sent back in response
1031       * </pre>
1032       */
1033      public boolean hasCallId() {
1034        return ((bitField0_ & 0x00000004) == 0x00000004);
1035      }
1036      /**
1037       * <code>required sint32 callId = 3;</code>
1038       *
1039       * <pre>
1040       * a sequence number that is sent back in response
1041       * </pre>
1042       */
1043      public int getCallId() {
1044        return callId_;
1045      }
1046      /**
1047       * <code>required sint32 callId = 3;</code>
1048       *
1049       * <pre>
1050       * a sequence number that is sent back in response
1051       * </pre>
1052       */
1053      public Builder setCallId(int value) {
1054        bitField0_ |= 0x00000004;
1055        callId_ = value;
1056        onChanged();
1057        return this;
1058      }
1059      /**
1060       * <code>required sint32 callId = 3;</code>
1061       *
1062       * <pre>
1063       * a sequence number that is sent back in response
1064       * </pre>
1065       */
1066      public Builder clearCallId() {
1067        bitField0_ = (bitField0_ & ~0x00000004);
1068        callId_ = 0;
1069        onChanged();
1070        return this;
1071      }
1072
1073      // required bytes clientId = 4;
1074      private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
1075      /**
1076       * <code>required bytes clientId = 4;</code>
1077       *
1078       * <pre>
1079       * Globally unique client ID
1080       * </pre>
1081       */
1082      public boolean hasClientId() {
1083        return ((bitField0_ & 0x00000008) == 0x00000008);
1084      }
1085      /**
1086       * <code>required bytes clientId = 4;</code>
1087       *
1088       * <pre>
1089       * Globally unique client ID
1090       * </pre>
1091       */
1092      public com.google.protobuf.ByteString getClientId() {
1093        return clientId_;
1094      }
1095      /**
1096       * <code>required bytes clientId = 4;</code>
1097       *
1098       * <pre>
1099       * Globally unique client ID
1100       * </pre>
1101       */
1102      public Builder setClientId(com.google.protobuf.ByteString value) {
1103        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
1107        clientId_ = value;
1108        onChanged();
1109        return this;
1110      }
1111      /**
1112       * <code>required bytes clientId = 4;</code>
1113       *
1114       * <pre>
1115       * Globally unique client ID
1116       * </pre>
1117       */
1118      public Builder clearClientId() {
1119        bitField0_ = (bitField0_ & ~0x00000008);
1120        clientId_ = getDefaultInstance().getClientId();
1121        onChanged();
1122        return this;
1123      }
1124
1125      // optional sint32 retryCount = 5 [default = -1];
1126      private int retryCount_ = -1;
1127      /**
1128       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1129       *
1130       * <pre>
1131       * clientId + callId uniquely identifies a request
1132       * retry count, 1 means this is the first retry
1133       * </pre>
1134       */
1135      public boolean hasRetryCount() {
1136        return ((bitField0_ & 0x00000010) == 0x00000010);
1137      }
1138      /**
1139       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1140       *
1141       * <pre>
1142       * clientId + callId uniquely identifies a request
1143       * retry count, 1 means this is the first retry
1144       * </pre>
1145       */
1146      public int getRetryCount() {
1147        return retryCount_;
1148      }
1149      /**
1150       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1151       *
1152       * <pre>
1153       * clientId + callId uniquely identifies a request
1154       * retry count, 1 means this is the first retry
1155       * </pre>
1156       */
1157      public Builder setRetryCount(int value) {
1158        bitField0_ |= 0x00000010;
1159        retryCount_ = value;
1160        onChanged();
1161        return this;
1162      }
1163      /**
1164       * <code>optional sint32 retryCount = 5 [default = -1];</code>
1165       *
1166       * <pre>
1167       * clientId + callId uniquely identifies a request
1168       * retry count, 1 means this is the first retry
1169       * </pre>
1170       */
1171      public Builder clearRetryCount() {
1172        bitField0_ = (bitField0_ & ~0x00000010);
1173        retryCount_ = -1;
1174        onChanged();
1175        return this;
1176      }
1177
1178      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcRequestHeaderProto)
1179    }
1180
1181    static {
1182      defaultInstance = new RpcRequestHeaderProto(true);
1183      defaultInstance.initFields();
1184    }
1185
1186    // @@protoc_insertion_point(class_scope:hadoop.common.RpcRequestHeaderProto)
1187  }
1188
1189  public interface RpcResponseHeaderProtoOrBuilder
1190      extends com.google.protobuf.MessageOrBuilder {
1191
1192    // required uint32 callId = 1;
1193    /**
1194     * <code>required uint32 callId = 1;</code>
1195     *
1196     * <pre>
1197     * callId used in Request
1198     * </pre>
1199     */
1200    boolean hasCallId();
1201    /**
1202     * <code>required uint32 callId = 1;</code>
1203     *
1204     * <pre>
1205     * callId used in Request
1206     * </pre>
1207     */
1208    int getCallId();
1209
1210    // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
1211    /**
1212     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
1213     */
1214    boolean hasStatus();
1215    /**
1216     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
1217     */
1218    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus();
1219
1220    // optional uint32 serverIpcVersionNum = 3;
1221    /**
1222     * <code>optional uint32 serverIpcVersionNum = 3;</code>
1223     *
1224     * <pre>
1225     * Sent if success or fail
1226     * </pre>
1227     */
1228    boolean hasServerIpcVersionNum();
1229    /**
1230     * <code>optional uint32 serverIpcVersionNum = 3;</code>
1231     *
1232     * <pre>
1233     * Sent if success or fail
1234     * </pre>
1235     */
1236    int getServerIpcVersionNum();
1237
1238    // optional string exceptionClassName = 4;
1239    /**
1240     * <code>optional string exceptionClassName = 4;</code>
1241     *
1242     * <pre>
1243     * if request fails
1244     * </pre>
1245     */
1246    boolean hasExceptionClassName();
1247    /**
1248     * <code>optional string exceptionClassName = 4;</code>
1249     *
1250     * <pre>
1251     * if request fails
1252     * </pre>
1253     */
1254    java.lang.String getExceptionClassName();
1255    /**
1256     * <code>optional string exceptionClassName = 4;</code>
1257     *
1258     * <pre>
1259     * if request fails
1260     * </pre>
1261     */
1262    com.google.protobuf.ByteString
1263        getExceptionClassNameBytes();
1264
1265    // optional string errorMsg = 5;
1266    /**
1267     * <code>optional string errorMsg = 5;</code>
1268     *
1269     * <pre>
     * if request fails, often contains a stack trace
1271     * </pre>
1272     */
1273    boolean hasErrorMsg();
1274    /**
1275     * <code>optional string errorMsg = 5;</code>
1276     *
1277     * <pre>
     * if request fails, often contains a stack trace
1279     * </pre>
1280     */
1281    java.lang.String getErrorMsg();
1282    /**
1283     * <code>optional string errorMsg = 5;</code>
1284     *
1285     * <pre>
     * if request fails, often contains a stack trace
1287     * </pre>
1288     */
1289    com.google.protobuf.ByteString
1290        getErrorMsgBytes();
1291
1292    // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
1293    /**
1294     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
1295     *
1296     * <pre>
1297     * in case of error
1298     * </pre>
1299     */
1300    boolean hasErrorDetail();
1301    /**
1302     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
1303     *
1304     * <pre>
1305     * in case of error
1306     * </pre>
1307     */
1308    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail();
1309
1310    // optional bytes clientId = 7;
1311    /**
1312     * <code>optional bytes clientId = 7;</code>
1313     *
1314     * <pre>
1315     * Globally unique client ID
1316     * </pre>
1317     */
1318    boolean hasClientId();
1319    /**
1320     * <code>optional bytes clientId = 7;</code>
1321     *
1322     * <pre>
1323     * Globally unique client ID
1324     * </pre>
1325     */
1326    com.google.protobuf.ByteString getClientId();
1327
1328    // optional sint32 retryCount = 8 [default = -1];
1329    /**
1330     * <code>optional sint32 retryCount = 8 [default = -1];</code>
1331     */
1332    boolean hasRetryCount();
1333    /**
1334     * <code>optional sint32 retryCount = 8 [default = -1];</code>
1335     */
1336    int getRetryCount();
1337  }
1338  /**
1339   * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
1340   *
1341   * <pre>
1342   **
1343   * Rpc Response Header
1344   * +------------------------------------------------------------------+
1345   * | Rpc total response length in bytes (4 bytes int)                 |
1346   * |  (sum of next two parts)                                         |
1347   * +------------------------------------------------------------------+
1348   * | RpcResponseHeaderProto - serialized delimited ie has len         |
1349   * +------------------------------------------------------------------+
1350   * | if request is successful:                                        |
1351   * |   - RpcResponse -  The actual rpc response  bytes follow         |
1352   * |     the response header                                          |
1353   * |     This response is serialized based on RpcKindProto            |
1354   * | if request fails :                                               |
1355   * |   The rpc response header contains the necessary info            |
1356   * +------------------------------------------------------------------+
1357   *
   * Note that the rpc response header is also used when connection setup fails,
   * i.e. the response looks like an rpc response with a fake callId.
   * (An illustrative sketch of reading a response header follows the
   * RpcStatusProto enum below.)
1360   * </pre>
1361   */
1362  public static final class RpcResponseHeaderProto extends
1363      com.google.protobuf.GeneratedMessage
1364      implements RpcResponseHeaderProtoOrBuilder {
1365    // Use RpcResponseHeaderProto.newBuilder() to construct.
1366    private RpcResponseHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1367      super(builder);
1368      this.unknownFields = builder.getUnknownFields();
1369    }
1370    private RpcResponseHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1371
1372    private static final RpcResponseHeaderProto defaultInstance;
1373    public static RpcResponseHeaderProto getDefaultInstance() {
1374      return defaultInstance;
1375    }
1376
1377    public RpcResponseHeaderProto getDefaultInstanceForType() {
1378      return defaultInstance;
1379    }
1380
1381    private final com.google.protobuf.UnknownFieldSet unknownFields;
1382    @java.lang.Override
1383    public final com.google.protobuf.UnknownFieldSet
1384        getUnknownFields() {
1385      return this.unknownFields;
1386    }
1387    private RpcResponseHeaderProto(
1388        com.google.protobuf.CodedInputStream input,
1389        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1390        throws com.google.protobuf.InvalidProtocolBufferException {
1391      initFields();
1392      int mutable_bitField0_ = 0;
1393      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1394          com.google.protobuf.UnknownFieldSet.newBuilder();
1395      try {
1396        boolean done = false;
1397        while (!done) {
1398          int tag = input.readTag();
1399          switch (tag) {
1400            case 0:
1401              done = true;
1402              break;
1403            default: {
1404              if (!parseUnknownField(input, unknownFields,
1405                                     extensionRegistry, tag)) {
1406                done = true;
1407              }
1408              break;
1409            }
1410            case 8: {
1411              bitField0_ |= 0x00000001;
1412              callId_ = input.readUInt32();
1413              break;
1414            }
1415            case 16: {
1416              int rawValue = input.readEnum();
1417              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.valueOf(rawValue);
1418              if (value == null) {
1419                unknownFields.mergeVarintField(2, rawValue);
1420              } else {
1421                bitField0_ |= 0x00000002;
1422                status_ = value;
1423              }
1424              break;
1425            }
1426            case 24: {
1427              bitField0_ |= 0x00000004;
1428              serverIpcVersionNum_ = input.readUInt32();
1429              break;
1430            }
1431            case 34: {
1432              bitField0_ |= 0x00000008;
1433              exceptionClassName_ = input.readBytes();
1434              break;
1435            }
1436            case 42: {
1437              bitField0_ |= 0x00000010;
1438              errorMsg_ = input.readBytes();
1439              break;
1440            }
1441            case 48: {
1442              int rawValue = input.readEnum();
1443              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.valueOf(rawValue);
1444              if (value == null) {
1445                unknownFields.mergeVarintField(6, rawValue);
1446              } else {
1447                bitField0_ |= 0x00000020;
1448                errorDetail_ = value;
1449              }
1450              break;
1451            }
1452            case 58: {
1453              bitField0_ |= 0x00000040;
1454              clientId_ = input.readBytes();
1455              break;
1456            }
1457            case 64: {
1458              bitField0_ |= 0x00000080;
1459              retryCount_ = input.readSInt32();
1460              break;
1461            }
1462          }
1463        }
1464      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1465        throw e.setUnfinishedMessage(this);
1466      } catch (java.io.IOException e) {
1467        throw new com.google.protobuf.InvalidProtocolBufferException(
1468            e.getMessage()).setUnfinishedMessage(this);
1469      } finally {
1470        this.unknownFields = unknownFields.build();
1471        makeExtensionsImmutable();
1472      }
1473    }
1474    public static final com.google.protobuf.Descriptors.Descriptor
1475        getDescriptor() {
1476      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
1477    }
1478
1479    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1480        internalGetFieldAccessorTable() {
1481      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
1482          .ensureFieldAccessorsInitialized(
1483              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
1484    }
1485
1486    public static com.google.protobuf.Parser<RpcResponseHeaderProto> PARSER =
1487        new com.google.protobuf.AbstractParser<RpcResponseHeaderProto>() {
1488      public RpcResponseHeaderProto parsePartialFrom(
1489          com.google.protobuf.CodedInputStream input,
1490          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1491          throws com.google.protobuf.InvalidProtocolBufferException {
1492        return new RpcResponseHeaderProto(input, extensionRegistry);
1493      }
1494    };
1495
1496    @java.lang.Override
1497    public com.google.protobuf.Parser<RpcResponseHeaderProto> getParserForType() {
1498      return PARSER;
1499    }
1500
1501    /**
1502     * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcStatusProto}
1503     */
1504    public enum RpcStatusProto
1505        implements com.google.protobuf.ProtocolMessageEnum {
1506      /**
1507       * <code>SUCCESS = 0;</code>
1508       *
1509       * <pre>
1510       * RPC succeeded
1511       * </pre>
1512       */
1513      SUCCESS(0, 0),
1514      /**
1515       * <code>ERROR = 1;</code>
1516       *
1517       * <pre>
       * RPC error - connection left open for future calls
1519       * </pre>
1520       */
1521      ERROR(1, 1),
1522      /**
1523       * <code>FATAL = 2;</code>
1524       *
1525       * <pre>
1526       * Fatal error - connection closed
1527       * </pre>
1528       */
1529      FATAL(2, 2),
1530      ;
1531
1532      /**
1533       * <code>SUCCESS = 0;</code>
1534       *
1535       * <pre>
1536       * RPC succeeded
1537       * </pre>
1538       */
1539      public static final int SUCCESS_VALUE = 0;
1540      /**
1541       * <code>ERROR = 1;</code>
1542       *
1543       * <pre>
       * RPC error - connection left open for future calls
1545       * </pre>
1546       */
1547      public static final int ERROR_VALUE = 1;
1548      /**
1549       * <code>FATAL = 2;</code>
1550       *
1551       * <pre>
1552       * Fatal error - connection closed
1553       * </pre>
1554       */
1555      public static final int FATAL_VALUE = 2;
1556
1557
1558      public final int getNumber() { return value; }
1559
1560      public static RpcStatusProto valueOf(int value) {
1561        switch (value) {
1562          case 0: return SUCCESS;
1563          case 1: return ERROR;
1564          case 2: return FATAL;
1565          default: return null;
1566        }
1567      }
1568
1569      public static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
1570          internalGetValueMap() {
1571        return internalValueMap;
1572      }
1573      private static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
1574          internalValueMap =
1575            new com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>() {
1576              public RpcStatusProto findValueByNumber(int number) {
1577                return RpcStatusProto.valueOf(number);
1578              }
1579            };
1580
1581      public final com.google.protobuf.Descriptors.EnumValueDescriptor
1582          getValueDescriptor() {
1583        return getDescriptor().getValues().get(index);
1584      }
1585      public final com.google.protobuf.Descriptors.EnumDescriptor
1586          getDescriptorForType() {
1587        return getDescriptor();
1588      }
1589      public static final com.google.protobuf.Descriptors.EnumDescriptor
1590          getDescriptor() {
1591        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(0);
1592      }
1593
1594      private static final RpcStatusProto[] VALUES = values();
1595
1596      public static RpcStatusProto valueOf(
1597          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
1598        if (desc.getType() != getDescriptor()) {
1599          throw new java.lang.IllegalArgumentException(
1600            "EnumValueDescriptor is not for this type.");
1601        }
1602        return VALUES[desc.getIndex()];
1603      }
1604
1605      private final int index;
1606      private final int value;
1607
1608      private RpcStatusProto(int index, int value) {
1609        this.index = index;
1610        this.value = value;
1611      }
1612
1613      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcStatusProto)
1614    }
1615
1616    /**
1617     * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto}
1618     */
1619    public enum RpcErrorCodeProto
1620        implements com.google.protobuf.ProtocolMessageEnum {
1621      /**
1622       * <code>ERROR_APPLICATION = 1;</code>
1623       *
1624       * <pre>
1625       * Non-fatal Rpc error - connection left open for future rpc calls
1626       * </pre>
1627       */
1628      ERROR_APPLICATION(0, 1),
1629      /**
1630       * <code>ERROR_NO_SUCH_METHOD = 2;</code>
1631       *
1632       * <pre>
1633       * Rpc error - no such method
1634       * </pre>
1635       */
1636      ERROR_NO_SUCH_METHOD(1, 2),
1637      /**
1638       * <code>ERROR_NO_SUCH_PROTOCOL = 3;</code>
1639       *
1640       * <pre>
1641       * Rpc error - no such protocol
1642       * </pre>
1643       */
1644      ERROR_NO_SUCH_PROTOCOL(2, 3),
1645      /**
1646       * <code>ERROR_RPC_SERVER = 4;</code>
1647       *
1648       * <pre>
1649       * Rpc error on server side
1650       * </pre>
1651       */
1652      ERROR_RPC_SERVER(3, 4),
1653      /**
1654       * <code>ERROR_SERIALIZING_RESPONSE = 5;</code>
1655       *
1656       * <pre>
       * error serializing response
1658       * </pre>
1659       */
1660      ERROR_SERIALIZING_RESPONSE(4, 5),
1661      /**
1662       * <code>ERROR_RPC_VERSION_MISMATCH = 6;</code>
1663       *
1664       * <pre>
1665       * Rpc protocol version mismatch
1666       * </pre>
1667       */
1668      ERROR_RPC_VERSION_MISMATCH(5, 6),
1669      /**
1670       * <code>FATAL_UNKNOWN = 10;</code>
1671       *
1672       * <pre>
1673       * Fatal Server side Rpc error - connection closed
1674       * </pre>
1675       */
1676      FATAL_UNKNOWN(6, 10),
1677      /**
1678       * <code>FATAL_UNSUPPORTED_SERIALIZATION = 11;</code>
1679       *
1680       * <pre>
       * IPC layer serialization type invalid
1682       * </pre>
1683       */
1684      FATAL_UNSUPPORTED_SERIALIZATION(7, 11),
1685      /**
1686       * <code>FATAL_INVALID_RPC_HEADER = 12;</code>
1687       *
1688       * <pre>
1689       * fields of RpcHeader are invalid
1690       * </pre>
1691       */
1692      FATAL_INVALID_RPC_HEADER(8, 12),
1693      /**
1694       * <code>FATAL_DESERIALIZING_REQUEST = 13;</code>
1695       *
1696       * <pre>
       * could not deserialize rpc request
1698       * </pre>
1699       */
1700      FATAL_DESERIALIZING_REQUEST(9, 13),
1701      /**
1702       * <code>FATAL_VERSION_MISMATCH = 14;</code>
1703       *
1704       * <pre>
       * IPC layer version mismatch
1706       * </pre>
1707       */
1708      FATAL_VERSION_MISMATCH(10, 14),
1709      /**
1710       * <code>FATAL_UNAUTHORIZED = 15;</code>
1711       *
1712       * <pre>
1713       * Auth failed
1714       * </pre>
1715       */
1716      FATAL_UNAUTHORIZED(11, 15),
1717      ;
1718
1719      /**
1720       * <code>ERROR_APPLICATION = 1;</code>
1721       *
1722       * <pre>
1723       * Non-fatal Rpc error - connection left open for future rpc calls
1724       * </pre>
1725       */
1726      public static final int ERROR_APPLICATION_VALUE = 1;
1727      /**
1728       * <code>ERROR_NO_SUCH_METHOD = 2;</code>
1729       *
1730       * <pre>
1731       * Rpc error - no such method
1732       * </pre>
1733       */
1734      public static final int ERROR_NO_SUCH_METHOD_VALUE = 2;
1735      /**
1736       * <code>ERROR_NO_SUCH_PROTOCOL = 3;</code>
1737       *
1738       * <pre>
1739       * Rpc error - no such protocol
1740       * </pre>
1741       */
1742      public static final int ERROR_NO_SUCH_PROTOCOL_VALUE = 3;
1743      /**
1744       * <code>ERROR_RPC_SERVER = 4;</code>
1745       *
1746       * <pre>
1747       * Rpc error on server side
1748       * </pre>
1749       */
1750      public static final int ERROR_RPC_SERVER_VALUE = 4;
1751      /**
1752       * <code>ERROR_SERIALIZING_RESPONSE = 5;</code>
1753       *
1754       * <pre>
       * error serializing response
1756       * </pre>
1757       */
1758      public static final int ERROR_SERIALIZING_RESPONSE_VALUE = 5;
1759      /**
1760       * <code>ERROR_RPC_VERSION_MISMATCH = 6;</code>
1761       *
1762       * <pre>
1763       * Rpc protocol version mismatch
1764       * </pre>
1765       */
1766      public static final int ERROR_RPC_VERSION_MISMATCH_VALUE = 6;
1767      /**
1768       * <code>FATAL_UNKNOWN = 10;</code>
1769       *
1770       * <pre>
1771       * Fatal Server side Rpc error - connection closed
1772       * </pre>
1773       */
1774      public static final int FATAL_UNKNOWN_VALUE = 10;
1775      /**
1776       * <code>FATAL_UNSUPPORTED_SERIALIZATION = 11;</code>
1777       *
1778       * <pre>
       * IPC layer serialization type invalid
1780       * </pre>
1781       */
1782      public static final int FATAL_UNSUPPORTED_SERIALIZATION_VALUE = 11;
1783      /**
1784       * <code>FATAL_INVALID_RPC_HEADER = 12;</code>
1785       *
1786       * <pre>
1787       * fields of RpcHeader are invalid
1788       * </pre>
1789       */
1790      public static final int FATAL_INVALID_RPC_HEADER_VALUE = 12;
1791      /**
1792       * <code>FATAL_DESERIALIZING_REQUEST = 13;</code>
1793       *
1794       * <pre>
       * could not deserialize rpc request
1796       * </pre>
1797       */
1798      public static final int FATAL_DESERIALIZING_REQUEST_VALUE = 13;
1799      /**
1800       * <code>FATAL_VERSION_MISMATCH = 14;</code>
1801       *
1802       * <pre>
       * IPC layer version mismatch
1804       * </pre>
1805       */
1806      public static final int FATAL_VERSION_MISMATCH_VALUE = 14;
1807      /**
1808       * <code>FATAL_UNAUTHORIZED = 15;</code>
1809       *
1810       * <pre>
1811       * Auth failed
1812       * </pre>
1813       */
1814      public static final int FATAL_UNAUTHORIZED_VALUE = 15;
1815
1816
1817      public final int getNumber() { return value; }
1818
1819      public static RpcErrorCodeProto valueOf(int value) {
1820        switch (value) {
1821          case 1: return ERROR_APPLICATION;
1822          case 2: return ERROR_NO_SUCH_METHOD;
1823          case 3: return ERROR_NO_SUCH_PROTOCOL;
1824          case 4: return ERROR_RPC_SERVER;
1825          case 5: return ERROR_SERIALIZING_RESPONSE;
1826          case 6: return ERROR_RPC_VERSION_MISMATCH;
1827          case 10: return FATAL_UNKNOWN;
1828          case 11: return FATAL_UNSUPPORTED_SERIALIZATION;
1829          case 12: return FATAL_INVALID_RPC_HEADER;
1830          case 13: return FATAL_DESERIALIZING_REQUEST;
1831          case 14: return FATAL_VERSION_MISMATCH;
1832          case 15: return FATAL_UNAUTHORIZED;
1833          default: return null;
1834        }
1835      }
1836
1837      public static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
1838          internalGetValueMap() {
1839        return internalValueMap;
1840      }
1841      private static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
1842          internalValueMap =
1843            new com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>() {
1844              public RpcErrorCodeProto findValueByNumber(int number) {
1845                return RpcErrorCodeProto.valueOf(number);
1846              }
1847            };
1848
1849      public final com.google.protobuf.Descriptors.EnumValueDescriptor
1850          getValueDescriptor() {
1851        return getDescriptor().getValues().get(index);
1852      }
1853      public final com.google.protobuf.Descriptors.EnumDescriptor
1854          getDescriptorForType() {
1855        return getDescriptor();
1856      }
1857      public static final com.google.protobuf.Descriptors.EnumDescriptor
1858          getDescriptor() {
1859        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(1);
1860      }
1861
1862      private static final RpcErrorCodeProto[] VALUES = values();
1863
1864      public static RpcErrorCodeProto valueOf(
1865          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
1866        if (desc.getType() != getDescriptor()) {
1867          throw new java.lang.IllegalArgumentException(
1868            "EnumValueDescriptor is not for this type.");
1869        }
1870        return VALUES[desc.getIndex()];
1871      }
1872
1873      private final int index;
1874      private final int value;
1875
1876      private RpcErrorCodeProto(int index, int value) {
1877        this.index = index;
1878        this.value = value;
1879      }
1880
1881      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto)
1882    }
1883
1884    private int bitField0_;
1885    // required uint32 callId = 1;
1886    public static final int CALLID_FIELD_NUMBER = 1;
1887    private int callId_;
1888    /**
1889     * <code>required uint32 callId = 1;</code>
1890     *
1891     * <pre>
1892     * callId used in Request
1893     * </pre>
1894     */
1895    public boolean hasCallId() {
1896      return ((bitField0_ & 0x00000001) == 0x00000001);
1897    }
1898    /**
1899     * <code>required uint32 callId = 1;</code>
1900     *
1901     * <pre>
1902     * callId used in Request
1903     * </pre>
1904     */
1905    public int getCallId() {
1906      return callId_;
1907    }
1908
1909    // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
1910    public static final int STATUS_FIELD_NUMBER = 2;
1911    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_;
1912    /**
1913     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
1914     */
1915    public boolean hasStatus() {
1916      return ((bitField0_ & 0x00000002) == 0x00000002);
1917    }
1918    /**
1919     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
1920     */
1921    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
1922      return status_;
1923    }
1924
1925    // optional uint32 serverIpcVersionNum = 3;
1926    public static final int SERVERIPCVERSIONNUM_FIELD_NUMBER = 3;
1927    private int serverIpcVersionNum_;
1928    /**
1929     * <code>optional uint32 serverIpcVersionNum = 3;</code>
1930     *
1931     * <pre>
     * Sent on success or failure
1933     * </pre>
1934     */
1935    public boolean hasServerIpcVersionNum() {
1936      return ((bitField0_ & 0x00000004) == 0x00000004);
1937    }
1938    /**
1939     * <code>optional uint32 serverIpcVersionNum = 3;</code>
1940     *
1941     * <pre>
     * Sent on success or failure
1943     * </pre>
1944     */
1945    public int getServerIpcVersionNum() {
1946      return serverIpcVersionNum_;
1947    }
1948
1949    // optional string exceptionClassName = 4;
1950    public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 4;
1951    private java.lang.Object exceptionClassName_;
1952    /**
1953     * <code>optional string exceptionClassName = 4;</code>
1954     *
1955     * <pre>
1956     * if request fails
1957     * </pre>
1958     */
1959    public boolean hasExceptionClassName() {
1960      return ((bitField0_ & 0x00000008) == 0x00000008);
1961    }
1962    /**
1963     * <code>optional string exceptionClassName = 4;</code>
1964     *
1965     * <pre>
1966     * if request fails
1967     * </pre>
1968     */
1969    public java.lang.String getExceptionClassName() {
1970      java.lang.Object ref = exceptionClassName_;
1971      if (ref instanceof java.lang.String) {
1972        return (java.lang.String) ref;
1973      } else {
1974        com.google.protobuf.ByteString bs = 
1975            (com.google.protobuf.ByteString) ref;
1976        java.lang.String s = bs.toStringUtf8();
1977        if (bs.isValidUtf8()) {
1978          exceptionClassName_ = s;
1979        }
1980        return s;
1981      }
1982    }
1983    /**
1984     * <code>optional string exceptionClassName = 4;</code>
1985     *
1986     * <pre>
1987     * if request fails
1988     * </pre>
1989     */
1990    public com.google.protobuf.ByteString
1991        getExceptionClassNameBytes() {
1992      java.lang.Object ref = exceptionClassName_;
1993      if (ref instanceof java.lang.String) {
1994        com.google.protobuf.ByteString b = 
1995            com.google.protobuf.ByteString.copyFromUtf8(
1996                (java.lang.String) ref);
1997        exceptionClassName_ = b;
1998        return b;
1999      } else {
2000        return (com.google.protobuf.ByteString) ref;
2001      }
2002    }
2003
2004    // optional string errorMsg = 5;
2005    public static final int ERRORMSG_FIELD_NUMBER = 5;
2006    private java.lang.Object errorMsg_;
2007    /**
2008     * <code>optional string errorMsg = 5;</code>
2009     *
2010     * <pre>
     * if request fails, often contains the stack trace
2012     * </pre>
2013     */
2014    public boolean hasErrorMsg() {
2015      return ((bitField0_ & 0x00000010) == 0x00000010);
2016    }
2017    /**
2018     * <code>optional string errorMsg = 5;</code>
2019     *
2020     * <pre>
     * if request fails, often contains the stack trace
2022     * </pre>
2023     */
2024    public java.lang.String getErrorMsg() {
2025      java.lang.Object ref = errorMsg_;
2026      if (ref instanceof java.lang.String) {
2027        return (java.lang.String) ref;
2028      } else {
2029        com.google.protobuf.ByteString bs = 
2030            (com.google.protobuf.ByteString) ref;
2031        java.lang.String s = bs.toStringUtf8();
2032        if (bs.isValidUtf8()) {
2033          errorMsg_ = s;
2034        }
2035        return s;
2036      }
2037    }
2038    /**
2039     * <code>optional string errorMsg = 5;</code>
2040     *
2041     * <pre>
     * if request fails, often contains the stack trace
2043     * </pre>
2044     */
2045    public com.google.protobuf.ByteString
2046        getErrorMsgBytes() {
2047      java.lang.Object ref = errorMsg_;
2048      if (ref instanceof java.lang.String) {
2049        com.google.protobuf.ByteString b = 
2050            com.google.protobuf.ByteString.copyFromUtf8(
2051                (java.lang.String) ref);
2052        errorMsg_ = b;
2053        return b;
2054      } else {
2055        return (com.google.protobuf.ByteString) ref;
2056      }
2057    }
2058
2059    // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
2060    public static final int ERRORDETAIL_FIELD_NUMBER = 6;
2061    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_;
2062    /**
2063     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
2064     *
2065     * <pre>
2066     * in case of error
2067     * </pre>
2068     */
2069    public boolean hasErrorDetail() {
2070      return ((bitField0_ & 0x00000020) == 0x00000020);
2071    }
2072    /**
2073     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
2074     *
2075     * <pre>
2076     * in case of error
2077     * </pre>
2078     */
2079    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
2080      return errorDetail_;
2081    }
2082
2083    // optional bytes clientId = 7;
2084    public static final int CLIENTID_FIELD_NUMBER = 7;
2085    private com.google.protobuf.ByteString clientId_;
2086    /**
2087     * <code>optional bytes clientId = 7;</code>
2088     *
2089     * <pre>
2090     * Globally unique client ID
2091     * </pre>
2092     */
2093    public boolean hasClientId() {
2094      return ((bitField0_ & 0x00000040) == 0x00000040);
2095    }
2096    /**
2097     * <code>optional bytes clientId = 7;</code>
2098     *
2099     * <pre>
2100     * Globally unique client ID
2101     * </pre>
2102     */
2103    public com.google.protobuf.ByteString getClientId() {
2104      return clientId_;
2105    }
2106
2107    // optional sint32 retryCount = 8 [default = -1];
2108    public static final int RETRYCOUNT_FIELD_NUMBER = 8;
2109    private int retryCount_;
2110    /**
2111     * <code>optional sint32 retryCount = 8 [default = -1];</code>
2112     */
2113    public boolean hasRetryCount() {
2114      return ((bitField0_ & 0x00000080) == 0x00000080);
2115    }
2116    /**
2117     * <code>optional sint32 retryCount = 8 [default = -1];</code>
2118     */
2119    public int getRetryCount() {
2120      return retryCount_;
2121    }
2122
2123    private void initFields() {
2124      callId_ = 0;
2125      status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
2126      serverIpcVersionNum_ = 0;
2127      exceptionClassName_ = "";
2128      errorMsg_ = "";
2129      errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
2130      clientId_ = com.google.protobuf.ByteString.EMPTY;
2131      retryCount_ = -1;
2132    }
2133    private byte memoizedIsInitialized = -1;
2134    public final boolean isInitialized() {
2135      byte isInitialized = memoizedIsInitialized;
2136      if (isInitialized != -1) return isInitialized == 1;
2137
2138      if (!hasCallId()) {
2139        memoizedIsInitialized = 0;
2140        return false;
2141      }
2142      if (!hasStatus()) {
2143        memoizedIsInitialized = 0;
2144        return false;
2145      }
2146      memoizedIsInitialized = 1;
2147      return true;
2148    }
2149
2150    public void writeTo(com.google.protobuf.CodedOutputStream output)
2151                        throws java.io.IOException {
2152      getSerializedSize();
2153      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2154        output.writeUInt32(1, callId_);
2155      }
2156      if (((bitField0_ & 0x00000002) == 0x00000002)) {
2157        output.writeEnum(2, status_.getNumber());
2158      }
2159      if (((bitField0_ & 0x00000004) == 0x00000004)) {
2160        output.writeUInt32(3, serverIpcVersionNum_);
2161      }
2162      if (((bitField0_ & 0x00000008) == 0x00000008)) {
2163        output.writeBytes(4, getExceptionClassNameBytes());
2164      }
2165      if (((bitField0_ & 0x00000010) == 0x00000010)) {
2166        output.writeBytes(5, getErrorMsgBytes());
2167      }
2168      if (((bitField0_ & 0x00000020) == 0x00000020)) {
2169        output.writeEnum(6, errorDetail_.getNumber());
2170      }
2171      if (((bitField0_ & 0x00000040) == 0x00000040)) {
2172        output.writeBytes(7, clientId_);
2173      }
2174      if (((bitField0_ & 0x00000080) == 0x00000080)) {
2175        output.writeSInt32(8, retryCount_);
2176      }
2177      getUnknownFields().writeTo(output);
2178    }
2179
2180    private int memoizedSerializedSize = -1;
2181    public int getSerializedSize() {
2182      int size = memoizedSerializedSize;
2183      if (size != -1) return size;
2184
2185      size = 0;
2186      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2187        size += com.google.protobuf.CodedOutputStream
2188          .computeUInt32Size(1, callId_);
2189      }
2190      if (((bitField0_ & 0x00000002) == 0x00000002)) {
2191        size += com.google.protobuf.CodedOutputStream
2192          .computeEnumSize(2, status_.getNumber());
2193      }
2194      if (((bitField0_ & 0x00000004) == 0x00000004)) {
2195        size += com.google.protobuf.CodedOutputStream
2196          .computeUInt32Size(3, serverIpcVersionNum_);
2197      }
2198      if (((bitField0_ & 0x00000008) == 0x00000008)) {
2199        size += com.google.protobuf.CodedOutputStream
2200          .computeBytesSize(4, getExceptionClassNameBytes());
2201      }
2202      if (((bitField0_ & 0x00000010) == 0x00000010)) {
2203        size += com.google.protobuf.CodedOutputStream
2204          .computeBytesSize(5, getErrorMsgBytes());
2205      }
2206      if (((bitField0_ & 0x00000020) == 0x00000020)) {
2207        size += com.google.protobuf.CodedOutputStream
2208          .computeEnumSize(6, errorDetail_.getNumber());
2209      }
2210      if (((bitField0_ & 0x00000040) == 0x00000040)) {
2211        size += com.google.protobuf.CodedOutputStream
2212          .computeBytesSize(7, clientId_);
2213      }
2214      if (((bitField0_ & 0x00000080) == 0x00000080)) {
2215        size += com.google.protobuf.CodedOutputStream
2216          .computeSInt32Size(8, retryCount_);
2217      }
2218      size += getUnknownFields().getSerializedSize();
2219      memoizedSerializedSize = size;
2220      return size;
2221    }
2222
2223    private static final long serialVersionUID = 0L;
2224    @java.lang.Override
2225    protected java.lang.Object writeReplace()
2226        throws java.io.ObjectStreamException {
2227      return super.writeReplace();
2228    }
2229
2230    @java.lang.Override
2231    public boolean equals(final java.lang.Object obj) {
2232      if (obj == this) {
        return true;
2234      }
2235      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)) {
2236        return super.equals(obj);
2237      }
2238      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) obj;
2239
2240      boolean result = true;
2241      result = result && (hasCallId() == other.hasCallId());
2242      if (hasCallId()) {
2243        result = result && (getCallId()
2244            == other.getCallId());
2245      }
2246      result = result && (hasStatus() == other.hasStatus());
2247      if (hasStatus()) {
2248        result = result &&
2249            (getStatus() == other.getStatus());
2250      }
2251      result = result && (hasServerIpcVersionNum() == other.hasServerIpcVersionNum());
2252      if (hasServerIpcVersionNum()) {
2253        result = result && (getServerIpcVersionNum()
2254            == other.getServerIpcVersionNum());
2255      }
2256      result = result && (hasExceptionClassName() == other.hasExceptionClassName());
2257      if (hasExceptionClassName()) {
2258        result = result && getExceptionClassName()
2259            .equals(other.getExceptionClassName());
2260      }
2261      result = result && (hasErrorMsg() == other.hasErrorMsg());
2262      if (hasErrorMsg()) {
2263        result = result && getErrorMsg()
2264            .equals(other.getErrorMsg());
2265      }
2266      result = result && (hasErrorDetail() == other.hasErrorDetail());
2267      if (hasErrorDetail()) {
2268        result = result &&
2269            (getErrorDetail() == other.getErrorDetail());
2270      }
2271      result = result && (hasClientId() == other.hasClientId());
2272      if (hasClientId()) {
2273        result = result && getClientId()
2274            .equals(other.getClientId());
2275      }
2276      result = result && (hasRetryCount() == other.hasRetryCount());
2277      if (hasRetryCount()) {
2278        result = result && (getRetryCount()
2279            == other.getRetryCount());
2280      }
2281      result = result &&
2282          getUnknownFields().equals(other.getUnknownFields());
2283      return result;
2284    }
2285
2286    private int memoizedHashCode = 0;
2287    @java.lang.Override
2288    public int hashCode() {
2289      if (memoizedHashCode != 0) {
2290        return memoizedHashCode;
2291      }
2292      int hash = 41;
2293      hash = (19 * hash) + getDescriptorForType().hashCode();
2294      if (hasCallId()) {
2295        hash = (37 * hash) + CALLID_FIELD_NUMBER;
2296        hash = (53 * hash) + getCallId();
2297      }
2298      if (hasStatus()) {
2299        hash = (37 * hash) + STATUS_FIELD_NUMBER;
2300        hash = (53 * hash) + hashEnum(getStatus());
2301      }
2302      if (hasServerIpcVersionNum()) {
2303        hash = (37 * hash) + SERVERIPCVERSIONNUM_FIELD_NUMBER;
2304        hash = (53 * hash) + getServerIpcVersionNum();
2305      }
2306      if (hasExceptionClassName()) {
2307        hash = (37 * hash) + EXCEPTIONCLASSNAME_FIELD_NUMBER;
2308        hash = (53 * hash) + getExceptionClassName().hashCode();
2309      }
2310      if (hasErrorMsg()) {
2311        hash = (37 * hash) + ERRORMSG_FIELD_NUMBER;
2312        hash = (53 * hash) + getErrorMsg().hashCode();
2313      }
2314      if (hasErrorDetail()) {
2315        hash = (37 * hash) + ERRORDETAIL_FIELD_NUMBER;
2316        hash = (53 * hash) + hashEnum(getErrorDetail());
2317      }
2318      if (hasClientId()) {
2319        hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
2320        hash = (53 * hash) + getClientId().hashCode();
2321      }
2322      if (hasRetryCount()) {
2323        hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
2324        hash = (53 * hash) + getRetryCount();
2325      }
2326      hash = (29 * hash) + getUnknownFields().hashCode();
2327      memoizedHashCode = hash;
2328      return hash;
2329    }
2330
2331    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
2332        com.google.protobuf.ByteString data)
2333        throws com.google.protobuf.InvalidProtocolBufferException {
2334      return PARSER.parseFrom(data);
2335    }
2336    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
2337        com.google.protobuf.ByteString data,
2338        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2339        throws com.google.protobuf.InvalidProtocolBufferException {
2340      return PARSER.parseFrom(data, extensionRegistry);
2341    }
2342    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(byte[] data)
2343        throws com.google.protobuf.InvalidProtocolBufferException {
2344      return PARSER.parseFrom(data);
2345    }
2346    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
2347        byte[] data,
2348        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2349        throws com.google.protobuf.InvalidProtocolBufferException {
2350      return PARSER.parseFrom(data, extensionRegistry);
2351    }
2352    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(java.io.InputStream input)
2353        throws java.io.IOException {
2354      return PARSER.parseFrom(input);
2355    }
2356    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
2357        java.io.InputStream input,
2358        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2359        throws java.io.IOException {
2360      return PARSER.parseFrom(input, extensionRegistry);
2361    }
2362    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(java.io.InputStream input)
2363        throws java.io.IOException {
2364      return PARSER.parseDelimitedFrom(input);
2365    }
2366    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(
2367        java.io.InputStream input,
2368        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2369        throws java.io.IOException {
2370      return PARSER.parseDelimitedFrom(input, extensionRegistry);
2371    }
2372    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
2373        com.google.protobuf.CodedInputStream input)
2374        throws java.io.IOException {
2375      return PARSER.parseFrom(input);
2376    }
2377    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
2378        com.google.protobuf.CodedInputStream input,
2379        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2380        throws java.io.IOException {
2381      return PARSER.parseFrom(input, extensionRegistry);
2382    }
2383
2384    public static Builder newBuilder() { return Builder.create(); }
2385    public Builder newBuilderForType() { return newBuilder(); }
2386    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto prototype) {
2387      return newBuilder().mergeFrom(prototype);
2388    }
2389    public Builder toBuilder() { return newBuilder(this); }
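
    /*
     * Illustrative sketch, not part of the generated API: building a minimal
     * successful header with the static factory above; callId and status are
     * the only required fields.
     *
     *   RpcResponseHeaderProto header = RpcResponseHeaderProto.newBuilder()
     *       .setCallId(1)
     *       .setStatus(RpcResponseHeaderProto.RpcStatusProto.SUCCESS)
     *       .build();
     */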
2390
2391    @java.lang.Override
2392    protected Builder newBuilderForType(
2393        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2394      Builder builder = new Builder(parent);
2395      return builder;
2396    }
2397    /**
2398     * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
2399     *
2400     * <pre>
2401     **
2402     * Rpc Response Header
2403     * +------------------------------------------------------------------+
2404     * | Rpc total response length in bytes (4 bytes int)                 |
2405     * |  (sum of next two parts)                                         |
2406     * +------------------------------------------------------------------+
2407     * | RpcResponseHeaderProto - serialized delimited ie has len         |
2408     * +------------------------------------------------------------------+
2409     * | if request is successful:                                        |
2410     * |   - RpcResponse -  The actual rpc response  bytes follow         |
2411     * |     the response header                                          |
2412     * |     This response is serialized based on RpcKindProto            |
2413     * | if request fails :                                               |
2414     * |   The rpc response header contains the necessary info            |
2415     * +------------------------------------------------------------------+
2416     *
     * Note that the rpc response header is also used when connection setup fails,
     * i.e. the response looks like an rpc response with a fake callId.
2419     * </pre>
2420     */
2421    public static final class Builder extends
2422        com.google.protobuf.GeneratedMessage.Builder<Builder>
2423       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProtoOrBuilder {
2424      public static final com.google.protobuf.Descriptors.Descriptor
2425          getDescriptor() {
2426        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
2427      }
2428
2429      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2430          internalGetFieldAccessorTable() {
2431        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
2432            .ensureFieldAccessorsInitialized(
2433                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
2434      }
2435
2436      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.newBuilder()
2437      private Builder() {
2438        maybeForceBuilderInitialization();
2439      }
2440
2441      private Builder(
2442          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2443        super(parent);
2444        maybeForceBuilderInitialization();
2445      }
2446      private void maybeForceBuilderInitialization() {
2447        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2448        }
2449      }
2450      private static Builder create() {
2451        return new Builder();
2452      }
2453
2454      public Builder clear() {
2455        super.clear();
2456        callId_ = 0;
2457        bitField0_ = (bitField0_ & ~0x00000001);
2458        status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
2459        bitField0_ = (bitField0_ & ~0x00000002);
2460        serverIpcVersionNum_ = 0;
2461        bitField0_ = (bitField0_ & ~0x00000004);
2462        exceptionClassName_ = "";
2463        bitField0_ = (bitField0_ & ~0x00000008);
2464        errorMsg_ = "";
2465        bitField0_ = (bitField0_ & ~0x00000010);
2466        errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
2467        bitField0_ = (bitField0_ & ~0x00000020);
2468        clientId_ = com.google.protobuf.ByteString.EMPTY;
2469        bitField0_ = (bitField0_ & ~0x00000040);
2470        retryCount_ = -1;
2471        bitField0_ = (bitField0_ & ~0x00000080);
2472        return this;
2473      }
2474
2475      public Builder clone() {
2476        return create().mergeFrom(buildPartial());
2477      }
2478
2479      public com.google.protobuf.Descriptors.Descriptor
2480          getDescriptorForType() {
2481        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
2482      }
2483
2484      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
2485        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance();
2486      }
2487
2488      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto build() {
2489        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = buildPartial();
2490        if (!result.isInitialized()) {
2491          throw newUninitializedMessageException(result);
2492        }
2493        return result;
2494      }
2495
2496      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto buildPartial() {
2497        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto(this);
2498        int from_bitField0_ = bitField0_;
2499        int to_bitField0_ = 0;
2500        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2501          to_bitField0_ |= 0x00000001;
2502        }
2503        result.callId_ = callId_;
2504        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
2505          to_bitField0_ |= 0x00000002;
2506        }
2507        result.status_ = status_;
2508        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
2509          to_bitField0_ |= 0x00000004;
2510        }
2511        result.serverIpcVersionNum_ = serverIpcVersionNum_;
2512        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
2513          to_bitField0_ |= 0x00000008;
2514        }
2515        result.exceptionClassName_ = exceptionClassName_;
2516        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
2517          to_bitField0_ |= 0x00000010;
2518        }
2519        result.errorMsg_ = errorMsg_;
2520        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
2521          to_bitField0_ |= 0x00000020;
2522        }
2523        result.errorDetail_ = errorDetail_;
2524        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
2525          to_bitField0_ |= 0x00000040;
2526        }
2527        result.clientId_ = clientId_;
2528        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
2529          to_bitField0_ |= 0x00000080;
2530        }
2531        result.retryCount_ = retryCount_;
2532        result.bitField0_ = to_bitField0_;
2533        onBuilt();
2534        return result;
2535      }
2536
2537      public Builder mergeFrom(com.google.protobuf.Message other) {
2538        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) {
2539          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)other);
2540        } else {
2541          super.mergeFrom(other);
2542          return this;
2543        }
2544      }
2545
2546      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other) {
2547        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance()) return this;
2548        if (other.hasCallId()) {
2549          setCallId(other.getCallId());
2550        }
2551        if (other.hasStatus()) {
2552          setStatus(other.getStatus());
2553        }
2554        if (other.hasServerIpcVersionNum()) {
2555          setServerIpcVersionNum(other.getServerIpcVersionNum());
2556        }
2557        if (other.hasExceptionClassName()) {
2558          bitField0_ |= 0x00000008;
2559          exceptionClassName_ = other.exceptionClassName_;
2560          onChanged();
2561        }
2562        if (other.hasErrorMsg()) {
2563          bitField0_ |= 0x00000010;
2564          errorMsg_ = other.errorMsg_;
2565          onChanged();
2566        }
2567        if (other.hasErrorDetail()) {
2568          setErrorDetail(other.getErrorDetail());
2569        }
2570        if (other.hasClientId()) {
2571          setClientId(other.getClientId());
2572        }
2573        if (other.hasRetryCount()) {
2574          setRetryCount(other.getRetryCount());
2575        }
2576        this.mergeUnknownFields(other.getUnknownFields());
2577        return this;
2578      }
2579
2580      public final boolean isInitialized() {
        if (!hasCallId()) {
          return false;
        }
        if (!hasStatus()) {
          return false;
        }
2589        return true;
2590      }
2591
2592      public Builder mergeFrom(
2593          com.google.protobuf.CodedInputStream input,
2594          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2595          throws java.io.IOException {
2596        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parsedMessage = null;
2597        try {
2598          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2599        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2600          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) e.getUnfinishedMessage();
2601          throw e;
2602        } finally {
2603          if (parsedMessage != null) {
2604            mergeFrom(parsedMessage);
2605          }
2606        }
2607        return this;
2608      }
2609      private int bitField0_;
2610
2611      // required uint32 callId = 1;
2612      private int callId_ ;
2613      /**
2614       * <code>required uint32 callId = 1;</code>
2615       *
2616       * <pre>
2617       * callId used in Request
2618       * </pre>
2619       */
2620      public boolean hasCallId() {
2621        return ((bitField0_ & 0x00000001) == 0x00000001);
2622      }
2623      /**
2624       * <code>required uint32 callId = 1;</code>
2625       *
2626       * <pre>
2627       * callId used in Request
2628       * </pre>
2629       */
2630      public int getCallId() {
2631        return callId_;
2632      }
2633      /**
2634       * <code>required uint32 callId = 1;</code>
2635       *
2636       * <pre>
2637       * callId used in Request
2638       * </pre>
2639       */
2640      public Builder setCallId(int value) {
2641        bitField0_ |= 0x00000001;
2642        callId_ = value;
2643        onChanged();
2644        return this;
2645      }
2646      /**
2647       * <code>required uint32 callId = 1;</code>
2648       *
2649       * <pre>
2650       * callId used in Request
2651       * </pre>
2652       */
2653      public Builder clearCallId() {
2654        bitField0_ = (bitField0_ & ~0x00000001);
2655        callId_ = 0;
2656        onChanged();
2657        return this;
2658      }
2659
2660      // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
2661      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
2662      /**
2663       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
2664       */
2665      public boolean hasStatus() {
2666        return ((bitField0_ & 0x00000002) == 0x00000002);
2667      }
2668      /**
2669       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
2670       */
2671      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
2672        return status_;
2673      }
2674      /**
2675       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
2676       */
2677      public Builder setStatus(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value) {
2678        if (value == null) {
2679          throw new NullPointerException();
2680        }
2681        bitField0_ |= 0x00000002;
2682        status_ = value;
2683        onChanged();
2684        return this;
2685      }
2686      /**
2687       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
2688       */
2689      public Builder clearStatus() {
2690        bitField0_ = (bitField0_ & ~0x00000002);
2691        status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
2692        onChanged();
2693        return this;
2694      }
2695
2696      // optional uint32 serverIpcVersionNum = 3;
2697      private int serverIpcVersionNum_ ;
2698      /**
2699       * <code>optional uint32 serverIpcVersionNum = 3;</code>
2700       *
2701       * <pre>
       * Sent on success or failure
2703       * </pre>
2704       */
2705      public boolean hasServerIpcVersionNum() {
2706        return ((bitField0_ & 0x00000004) == 0x00000004);
2707      }
2708      /**
2709       * <code>optional uint32 serverIpcVersionNum = 3;</code>
2710       *
2711       * <pre>
       * Sent on success or failure
2713       * </pre>
2714       */
2715      public int getServerIpcVersionNum() {
2716        return serverIpcVersionNum_;
2717      }
2718      /**
2719       * <code>optional uint32 serverIpcVersionNum = 3;</code>
2720       *
2721       * <pre>
       * Sent on success or failure
2723       * </pre>
2724       */
2725      public Builder setServerIpcVersionNum(int value) {
2726        bitField0_ |= 0x00000004;
2727        serverIpcVersionNum_ = value;
2728        onChanged();
2729        return this;
2730      }
2731      /**
2732       * <code>optional uint32 serverIpcVersionNum = 3;</code>
2733       *
2734       * <pre>
       * Sent on success or failure
2736       * </pre>
2737       */
2738      public Builder clearServerIpcVersionNum() {
2739        bitField0_ = (bitField0_ & ~0x00000004);
2740        serverIpcVersionNum_ = 0;
2741        onChanged();
2742        return this;
2743      }
2744
2745      // optional string exceptionClassName = 4;
2746      private java.lang.Object exceptionClassName_ = "";
2747      /**
2748       * <code>optional string exceptionClassName = 4;</code>
2749       *
2750       * <pre>
2751       * if request fails
2752       * </pre>
2753       */
2754      public boolean hasExceptionClassName() {
2755        return ((bitField0_ & 0x00000008) == 0x00000008);
2756      }
2757      /**
2758       * <code>optional string exceptionClassName = 4;</code>
2759       *
2760       * <pre>
2761       * if request fails
2762       * </pre>
2763       */
2764      public java.lang.String getExceptionClassName() {
2765        java.lang.Object ref = exceptionClassName_;
2766        if (!(ref instanceof java.lang.String)) {
2767          java.lang.String s = ((com.google.protobuf.ByteString) ref)
2768              .toStringUtf8();
2769          exceptionClassName_ = s;
2770          return s;
2771        } else {
2772          return (java.lang.String) ref;
2773        }
2774      }
2775      /**
2776       * <code>optional string exceptionClassName = 4;</code>
2777       *
2778       * <pre>
2779       * if request fails
2780       * </pre>
2781       */
2782      public com.google.protobuf.ByteString
2783          getExceptionClassNameBytes() {
2784        java.lang.Object ref = exceptionClassName_;
2785        if (ref instanceof String) {
2786          com.google.protobuf.ByteString b = 
2787              com.google.protobuf.ByteString.copyFromUtf8(
2788                  (java.lang.String) ref);
2789          exceptionClassName_ = b;
2790          return b;
2791        } else {
2792          return (com.google.protobuf.ByteString) ref;
2793        }
2794      }
2795      /**
2796       * <code>optional string exceptionClassName = 4;</code>
2797       *
2798       * <pre>
2799       * if request fails
2800       * </pre>
2801       */
2802      public Builder setExceptionClassName(
2803          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
2808        exceptionClassName_ = value;
2809        onChanged();
2810        return this;
2811      }
2812      /**
2813       * <code>optional string exceptionClassName = 4;</code>
2814       *
2815       * <pre>
2816       * if request fails
2817       * </pre>
2818       */
2819      public Builder clearExceptionClassName() {
2820        bitField0_ = (bitField0_ & ~0x00000008);
2821        exceptionClassName_ = getDefaultInstance().getExceptionClassName();
2822        onChanged();
2823        return this;
2824      }
2825      /**
2826       * <code>optional string exceptionClassName = 4;</code>
2827       *
2828       * <pre>
2829       * if request fails
2830       * </pre>
2831       */
2832      public Builder setExceptionClassNameBytes(
2833          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
2838        exceptionClassName_ = value;
2839        onChanged();
2840        return this;
2841      }
2842
2843      // optional string errorMsg = 5;
2844      private java.lang.Object errorMsg_ = "";
2845      /**
2846       * <code>optional string errorMsg = 5;</code>
2847       *
2848       * <pre>
       * if request fails, often contains the stack trace
2850       * </pre>
2851       */
2852      public boolean hasErrorMsg() {
2853        return ((bitField0_ & 0x00000010) == 0x00000010);
2854      }
2855      /**
2856       * <code>optional string errorMsg = 5;</code>
2857       *
2858       * <pre>
       * if request fails, often contains the stack trace
2860       * </pre>
2861       */
2862      public java.lang.String getErrorMsg() {
2863        java.lang.Object ref = errorMsg_;
2864        if (!(ref instanceof java.lang.String)) {
2865          java.lang.String s = ((com.google.protobuf.ByteString) ref)
2866              .toStringUtf8();
2867          errorMsg_ = s;
2868          return s;
2869        } else {
2870          return (java.lang.String) ref;
2871        }
2872      }
2873      /**
2874       * <code>optional string errorMsg = 5;</code>
2875       *
2876       * <pre>
       * if request fails, often contains the stack trace
2878       * </pre>
2879       */
2880      public com.google.protobuf.ByteString
2881          getErrorMsgBytes() {
2882        java.lang.Object ref = errorMsg_;
2883        if (ref instanceof String) {
2884          com.google.protobuf.ByteString b = 
2885              com.google.protobuf.ByteString.copyFromUtf8(
2886                  (java.lang.String) ref);
2887          errorMsg_ = b;
2888          return b;
2889        } else {
2890          return (com.google.protobuf.ByteString) ref;
2891        }
2892      }
2893      /**
2894       * <code>optional string errorMsg = 5;</code>
2895       *
2896       * <pre>
       * if request fails, often contains the stack trace
2898       * </pre>
2899       */
2900      public Builder setErrorMsg(
2901          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
2906        errorMsg_ = value;
2907        onChanged();
2908        return this;
2909      }
2910      /**
2911       * <code>optional string errorMsg = 5;</code>
2912       *
2913       * <pre>
       * if request fails, often contains the stack trace
2915       * </pre>
2916       */
2917      public Builder clearErrorMsg() {
2918        bitField0_ = (bitField0_ & ~0x00000010);
2919        errorMsg_ = getDefaultInstance().getErrorMsg();
2920        onChanged();
2921        return this;
2922      }
2923      /**
2924       * <code>optional string errorMsg = 5;</code>
2925       *
2926       * <pre>
       * if request fails, often contains the stack trace
2928       * </pre>
2929       */
2930      public Builder setErrorMsgBytes(
2931          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
2936        errorMsg_ = value;
2937        onChanged();
2938        return this;
2939      }
2940
2941      // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
2942      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
2943      /**
2944       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
2945       *
2946       * <pre>
2947       * in case of error
2948       * </pre>
2949       */
2950      public boolean hasErrorDetail() {
2951        return ((bitField0_ & 0x00000020) == 0x00000020);
2952      }
2953      /**
2954       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
2955       *
2956       * <pre>
2957       * in case of error
2958       * </pre>
2959       */
2960      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
2961        return errorDetail_;
2962      }
2963      /**
2964       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
2965       *
2966       * <pre>
2967       * in case of error
2968       * </pre>
2969       */
2970      public Builder setErrorDetail(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value) {
2971        if (value == null) {
2972          throw new NullPointerException();
2973        }
2974        bitField0_ |= 0x00000020;
2975        errorDetail_ = value;
2976        onChanged();
2977        return this;
2978      }
2979      /**
2980       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
2981       *
2982       * <pre>
2983       * in case of error
2984       * </pre>
2985       */
2986      public Builder clearErrorDetail() {
2987        bitField0_ = (bitField0_ & ~0x00000020);
2988        errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
2989        onChanged();
2990        return this;
2991      }
2992
2993      // optional bytes clientId = 7;
2994      private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
2995      /**
2996       * <code>optional bytes clientId = 7;</code>
2997       *
2998       * <pre>
2999       * Globally unique client ID
3000       * </pre>
3001       */
3002      public boolean hasClientId() {
3003        return ((bitField0_ & 0x00000040) == 0x00000040);
3004      }
3005      /**
3006       * <code>optional bytes clientId = 7;</code>
3007       *
3008       * <pre>
3009       * Globally unique client ID
3010       * </pre>
3011       */
3012      public com.google.protobuf.ByteString getClientId() {
3013        return clientId_;
3014      }
3015      /**
3016       * <code>optional bytes clientId = 7;</code>
3017       *
3018       * <pre>
3019       * Globally unique client ID
3020       * </pre>
3021       */
3022      public Builder setClientId(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000040;
3027        clientId_ = value;
3028        onChanged();
3029        return this;
3030      }
3031      /**
3032       * <code>optional bytes clientId = 7;</code>
3033       *
3034       * <pre>
3035       * Globally unique client ID
3036       * </pre>
3037       */
3038      public Builder clearClientId() {
3039        bitField0_ = (bitField0_ & ~0x00000040);
3040        clientId_ = getDefaultInstance().getClientId();
3041        onChanged();
3042        return this;
3043      }
3044
3045      // optional sint32 retryCount = 8 [default = -1];
3046      private int retryCount_ = -1;
3047      /**
3048       * <code>optional sint32 retryCount = 8 [default = -1];</code>
3049       */
3050      public boolean hasRetryCount() {
3051        return ((bitField0_ & 0x00000080) == 0x00000080);
3052      }
3053      /**
3054       * <code>optional sint32 retryCount = 8 [default = -1];</code>
3055       */
3056      public int getRetryCount() {
3057        return retryCount_;
3058      }
3059      /**
3060       * <code>optional sint32 retryCount = 8 [default = -1];</code>
3061       */
3062      public Builder setRetryCount(int value) {
3063        bitField0_ |= 0x00000080;
3064        retryCount_ = value;
3065        onChanged();
3066        return this;
3067      }
3068      /**
3069       * <code>optional sint32 retryCount = 8 [default = -1];</code>
3070       */
3071      public Builder clearRetryCount() {
3072        bitField0_ = (bitField0_ & ~0x00000080);
3073        retryCount_ = -1;
3074        onChanged();
3075        return this;
3076      }
3077
3078      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcResponseHeaderProto)
3079    }
3080
3081    static {
3082      defaultInstance = new RpcResponseHeaderProto(true);
3083      defaultInstance.initFields();
3084    }
3085
3086    // @@protoc_insertion_point(class_scope:hadoop.common.RpcResponseHeaderProto)
3087  }
3088
3089  public interface RpcSaslProtoOrBuilder
3090      extends com.google.protobuf.MessageOrBuilder {
3091
3092    // optional uint32 version = 1;
3093    /**
3094     * <code>optional uint32 version = 1;</code>
3095     */
3096    boolean hasVersion();
3097    /**
3098     * <code>optional uint32 version = 1;</code>
3099     */
3100    int getVersion();
3101
3102    // required .hadoop.common.RpcSaslProto.SaslState state = 2;
3103    /**
3104     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
3105     */
3106    boolean hasState();
3107    /**
3108     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
3109     */
3110    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState();
3111
3112    // optional bytes token = 3;
3113    /**
3114     * <code>optional bytes token = 3;</code>
3115     */
3116    boolean hasToken();
3117    /**
3118     * <code>optional bytes token = 3;</code>
3119     */
3120    com.google.protobuf.ByteString getToken();
3121
3122    // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
3123    /**
3124     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
3125     */
3126    java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> 
3127        getAuthsList();
3128    /**
3129     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
3130     */
3131    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index);
3132    /**
3133     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
3134     */
3135    int getAuthsCount();
3136    /**
3137     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
3138     */
3139    java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
3140        getAuthsOrBuilderList();
3141    /**
3142     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
3143     */
3144    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
3145        int index);
3146  }
3147  /**
3148   * Protobuf type {@code hadoop.common.RpcSaslProto}
3149   */
3150  public static final class RpcSaslProto extends
3151      com.google.protobuf.GeneratedMessage
3152      implements RpcSaslProtoOrBuilder {
3153    // Use RpcSaslProto.newBuilder() to construct.
3154    private RpcSaslProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3155      super(builder);
3156      this.unknownFields = builder.getUnknownFields();
3157    }
3158    private RpcSaslProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3159
3160    private static final RpcSaslProto defaultInstance;
3161    public static RpcSaslProto getDefaultInstance() {
3162      return defaultInstance;
3163    }
3164
3165    public RpcSaslProto getDefaultInstanceForType() {
3166      return defaultInstance;
3167    }
3168
3169    private final com.google.protobuf.UnknownFieldSet unknownFields;
3170    @java.lang.Override
3171    public final com.google.protobuf.UnknownFieldSet
3172        getUnknownFields() {
3173      return this.unknownFields;
3174    }
3175    private RpcSaslProto(
3176        com.google.protobuf.CodedInputStream input,
3177        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3178        throws com.google.protobuf.InvalidProtocolBufferException {
3179      initFields();
3180      int mutable_bitField0_ = 0;
3181      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3182          com.google.protobuf.UnknownFieldSet.newBuilder();
3183      try {
3184        boolean done = false;
3185        while (!done) {
3186          int tag = input.readTag();
3187          switch (tag) {
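            // Tag layout handled below: each tag is (field_number << 3) | wire_type,
            // so 0 marks end of input, 8 is field 1 (version, varint), 16 is
            // field 2 (state, varint enum), 26 is field 3 (token, length-delimited
            // bytes), and 34 is field 4 (auths, length-delimited SaslAuth message).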
3188            case 0:
3189              done = true;
3190              break;
3191            default: {
3192              if (!parseUnknownField(input, unknownFields,
3193                                     extensionRegistry, tag)) {
3194                done = true;
3195              }
3196              break;
3197            }
3198            case 8: {
3199              bitField0_ |= 0x00000001;
3200              version_ = input.readUInt32();
3201              break;
3202            }
3203            case 16: {
3204              int rawValue = input.readEnum();
3205              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.valueOf(rawValue);
3206              if (value == null) {
3207                unknownFields.mergeVarintField(2, rawValue);
3208              } else {
3209                bitField0_ |= 0x00000002;
3210                state_ = value;
3211              }
3212              break;
3213            }
3214            case 26: {
3215              bitField0_ |= 0x00000004;
3216              token_ = input.readBytes();
3217              break;
3218            }
3219            case 34: {
3220              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
3221                auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>();
3222                mutable_bitField0_ |= 0x00000008;
3223              }
3224              auths_.add(input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.PARSER, extensionRegistry));
3225              break;
3226            }
3227          }
3228        }
3229      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3230        throw e.setUnfinishedMessage(this);
3231      } catch (java.io.IOException e) {
3232        throw new com.google.protobuf.InvalidProtocolBufferException(
3233            e.getMessage()).setUnfinishedMessage(this);
3234      } finally {
3235        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
3236          auths_ = java.util.Collections.unmodifiableList(auths_);
3237        }
3238        this.unknownFields = unknownFields.build();
3239        makeExtensionsImmutable();
3240      }
3241    }
3242    public static final com.google.protobuf.Descriptors.Descriptor
3243        getDescriptor() {
3244      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
3245    }
3246
3247    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3248        internalGetFieldAccessorTable() {
3249      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
3250          .ensureFieldAccessorsInitialized(
3251              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
3252    }
3253
3254    public static com.google.protobuf.Parser<RpcSaslProto> PARSER =
3255        new com.google.protobuf.AbstractParser<RpcSaslProto>() {
3256      public RpcSaslProto parsePartialFrom(
3257          com.google.protobuf.CodedInputStream input,
3258          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3259          throws com.google.protobuf.InvalidProtocolBufferException {
3260        return new RpcSaslProto(input, extensionRegistry);
3261      }
3262    };
3263
3264    @java.lang.Override
3265    public com.google.protobuf.Parser<RpcSaslProto> getParserForType() {
3266      return PARSER;
3267    }
3268
3269    /**
3270     * Protobuf enum {@code hadoop.common.RpcSaslProto.SaslState}
3271     */
3272    public enum SaslState
3273        implements com.google.protobuf.ProtocolMessageEnum {
3274      /**
3275       * <code>SUCCESS = 0;</code>
3276       */
3277      SUCCESS(0, 0),
3278      /**
3279       * <code>NEGOTIATE = 1;</code>
3280       */
3281      NEGOTIATE(1, 1),
3282      /**
3283       * <code>INITIATE = 2;</code>
3284       */
3285      INITIATE(2, 2),
3286      /**
3287       * <code>CHALLENGE = 3;</code>
3288       */
3289      CHALLENGE(3, 3),
3290      /**
3291       * <code>RESPONSE = 4;</code>
3292       */
3293      RESPONSE(4, 4),
3294      /**
3295       * <code>WRAP = 5;</code>
3296       */
3297      WRAP(5, 5),
3298      ;
3299
3300      /**
3301       * <code>SUCCESS = 0;</code>
3302       */
3303      public static final int SUCCESS_VALUE = 0;
3304      /**
3305       * <code>NEGOTIATE = 1;</code>
3306       */
3307      public static final int NEGOTIATE_VALUE = 1;
3308      /**
3309       * <code>INITIATE = 2;</code>
3310       */
3311      public static final int INITIATE_VALUE = 2;
3312      /**
3313       * <code>CHALLENGE = 3;</code>
3314       */
3315      public static final int CHALLENGE_VALUE = 3;
3316      /**
3317       * <code>RESPONSE = 4;</code>
3318       */
3319      public static final int RESPONSE_VALUE = 4;
3320      /**
3321       * <code>WRAP = 5;</code>
3322       */
3323      public static final int WRAP_VALUE = 5;
3324
3325
3326      public final int getNumber() { return value; }
3327
3328      public static SaslState valueOf(int value) {
3329        switch (value) {
3330          case 0: return SUCCESS;
3331          case 1: return NEGOTIATE;
3332          case 2: return INITIATE;
3333          case 3: return CHALLENGE;
3334          case 4: return RESPONSE;
3335          case 5: return WRAP;
3336          default: return null;
3337        }
3338      }
3339
3340      public static com.google.protobuf.Internal.EnumLiteMap<SaslState>
3341          internalGetValueMap() {
3342        return internalValueMap;
3343      }
3344      private static com.google.protobuf.Internal.EnumLiteMap<SaslState>
3345          internalValueMap =
3346            new com.google.protobuf.Internal.EnumLiteMap<SaslState>() {
3347              public SaslState findValueByNumber(int number) {
3348                return SaslState.valueOf(number);
3349              }
3350            };
3351
3352      public final com.google.protobuf.Descriptors.EnumValueDescriptor
3353          getValueDescriptor() {
3354        return getDescriptor().getValues().get(index);
3355      }
3356      public final com.google.protobuf.Descriptors.EnumDescriptor
3357          getDescriptorForType() {
3358        return getDescriptor();
3359      }
3360      public static final com.google.protobuf.Descriptors.EnumDescriptor
3361          getDescriptor() {
3362        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDescriptor().getEnumTypes().get(0);
3363      }
3364
3365      private static final SaslState[] VALUES = values();
3366
3367      public static SaslState valueOf(
3368          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
3369        if (desc.getType() != getDescriptor()) {
3370          throw new java.lang.IllegalArgumentException(
3371            "EnumValueDescriptor is not for this type.");
3372        }
3373        return VALUES[desc.getIndex()];
3374      }
3375
3376      private final int index;
3377      private final int value;
3378
3379      private SaslState(int index, int value) {
3380        this.index = index;
3381        this.value = value;
3382      }
3383
3384      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcSaslProto.SaslState)
3385    }
3386
3387    public interface SaslAuthOrBuilder
3388        extends com.google.protobuf.MessageOrBuilder {
3389
3390      // required string method = 1;
3391      /**
3392       * <code>required string method = 1;</code>
3393       */
3394      boolean hasMethod();
3395      /**
3396       * <code>required string method = 1;</code>
3397       */
3398      java.lang.String getMethod();
3399      /**
3400       * <code>required string method = 1;</code>
3401       */
3402      com.google.protobuf.ByteString
3403          getMethodBytes();
3404
3405      // required string mechanism = 2;
3406      /**
3407       * <code>required string mechanism = 2;</code>
3408       */
3409      boolean hasMechanism();
3410      /**
3411       * <code>required string mechanism = 2;</code>
3412       */
3413      java.lang.String getMechanism();
3414      /**
3415       * <code>required string mechanism = 2;</code>
3416       */
3417      com.google.protobuf.ByteString
3418          getMechanismBytes();
3419
3420      // optional string protocol = 3;
3421      /**
3422       * <code>optional string protocol = 3;</code>
3423       */
3424      boolean hasProtocol();
3425      /**
3426       * <code>optional string protocol = 3;</code>
3427       */
3428      java.lang.String getProtocol();
3429      /**
3430       * <code>optional string protocol = 3;</code>
3431       */
3432      com.google.protobuf.ByteString
3433          getProtocolBytes();
3434
3435      // optional string serverId = 4;
3436      /**
3437       * <code>optional string serverId = 4;</code>
3438       */
3439      boolean hasServerId();
3440      /**
3441       * <code>optional string serverId = 4;</code>
3442       */
3443      java.lang.String getServerId();
3444      /**
3445       * <code>optional string serverId = 4;</code>
3446       */
3447      com.google.protobuf.ByteString
3448          getServerIdBytes();
3449
3450      // optional bytes challenge = 5;
3451      /**
3452       * <code>optional bytes challenge = 5;</code>
3453       */
3454      boolean hasChallenge();
3455      /**
3456       * <code>optional bytes challenge = 5;</code>
3457       */
3458      com.google.protobuf.ByteString getChallenge();
3459    }
3460    /**
3461     * Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
3462     */
3463    public static final class SaslAuth extends
3464        com.google.protobuf.GeneratedMessage
3465        implements SaslAuthOrBuilder {
3466      // Use SaslAuth.newBuilder() to construct.
3467      private SaslAuth(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3468        super(builder);
3469        this.unknownFields = builder.getUnknownFields();
3470      }
3471      private SaslAuth(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3472
3473      private static final SaslAuth defaultInstance;
3474      public static SaslAuth getDefaultInstance() {
3475        return defaultInstance;
3476      }
3477
3478      public SaslAuth getDefaultInstanceForType() {
3479        return defaultInstance;
3480      }
3481
3482      private final com.google.protobuf.UnknownFieldSet unknownFields;
3483      @java.lang.Override
3484      public final com.google.protobuf.UnknownFieldSet
3485          getUnknownFields() {
3486        return this.unknownFields;
3487      }
3488      private SaslAuth(
3489          com.google.protobuf.CodedInputStream input,
3490          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3491          throws com.google.protobuf.InvalidProtocolBufferException {
3492        initFields();
3493        int mutable_bitField0_ = 0;
3494        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3495            com.google.protobuf.UnknownFieldSet.newBuilder();
3496        try {
3497          boolean done = false;
3498          while (!done) {
3499            int tag = input.readTag();
3500            switch (tag) {
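              // Tag layout handled below ((field_number << 3) | wire_type): 0 ends
              // the stream; 10, 18, 26, 34 and 42 are fields 1-5 (method,
              // mechanism, protocol, serverId, challenge), all length-delimited.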
3501              case 0:
3502                done = true;
3503                break;
3504              default: {
3505                if (!parseUnknownField(input, unknownFields,
3506                                       extensionRegistry, tag)) {
3507                  done = true;
3508                }
3509                break;
3510              }
3511              case 10: {
3512                bitField0_ |= 0x00000001;
3513                method_ = input.readBytes();
3514                break;
3515              }
3516              case 18: {
3517                bitField0_ |= 0x00000002;
3518                mechanism_ = input.readBytes();
3519                break;
3520              }
3521              case 26: {
3522                bitField0_ |= 0x00000004;
3523                protocol_ = input.readBytes();
3524                break;
3525              }
3526              case 34: {
3527                bitField0_ |= 0x00000008;
3528                serverId_ = input.readBytes();
3529                break;
3530              }
3531              case 42: {
3532                bitField0_ |= 0x00000010;
3533                challenge_ = input.readBytes();
3534                break;
3535              }
3536            }
3537          }
3538        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3539          throw e.setUnfinishedMessage(this);
3540        } catch (java.io.IOException e) {
3541          throw new com.google.protobuf.InvalidProtocolBufferException(
3542              e.getMessage()).setUnfinishedMessage(this);
3543        } finally {
3544          this.unknownFields = unknownFields.build();
3545          makeExtensionsImmutable();
3546        }
3547      }
3548      public static final com.google.protobuf.Descriptors.Descriptor
3549          getDescriptor() {
3550        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
3551      }
3552
3553      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3554          internalGetFieldAccessorTable() {
3555        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
3556            .ensureFieldAccessorsInitialized(
3557                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
3558      }
3559
3560      public static com.google.protobuf.Parser<SaslAuth> PARSER =
3561          new com.google.protobuf.AbstractParser<SaslAuth>() {
3562        public SaslAuth parsePartialFrom(
3563            com.google.protobuf.CodedInputStream input,
3564            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3565            throws com.google.protobuf.InvalidProtocolBufferException {
3566          return new SaslAuth(input, extensionRegistry);
3567        }
3568      };
3569
3570      @java.lang.Override
3571      public com.google.protobuf.Parser<SaslAuth> getParserForType() {
3572        return PARSER;
3573      }
3574
3575      private int bitField0_;
3576      // required string method = 1;
3577      public static final int METHOD_FIELD_NUMBER = 1;
3578      private java.lang.Object method_;
3579      /**
3580       * <code>required string method = 1;</code>
3581       */
3582      public boolean hasMethod() {
3583        return ((bitField0_ & 0x00000001) == 0x00000001);
3584      }
3585      /**
3586       * <code>required string method = 1;</code>
3587       */
3588      public java.lang.String getMethod() {
3589        java.lang.Object ref = method_;
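        // The field holds either a String or a ByteString.  On first access the
        // bytes are decoded and, when they are valid UTF-8, cached back into
        // method_ so later calls return the String directly.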
3590        if (ref instanceof java.lang.String) {
3591          return (java.lang.String) ref;
3592        } else {
3593          com.google.protobuf.ByteString bs = 
3594              (com.google.protobuf.ByteString) ref;
3595          java.lang.String s = bs.toStringUtf8();
3596          if (bs.isValidUtf8()) {
3597            method_ = s;
3598          }
3599          return s;
3600        }
3601      }
3602      /**
3603       * <code>required string method = 1;</code>
3604       */
3605      public com.google.protobuf.ByteString
3606          getMethodBytes() {
3607        java.lang.Object ref = method_;
3608        if (ref instanceof java.lang.String) {
3609          com.google.protobuf.ByteString b = 
3610              com.google.protobuf.ByteString.copyFromUtf8(
3611                  (java.lang.String) ref);
3612          method_ = b;
3613          return b;
3614        } else {
3615          return (com.google.protobuf.ByteString) ref;
3616        }
3617      }
3618
3619      // required string mechanism = 2;
3620      public static final int MECHANISM_FIELD_NUMBER = 2;
3621      private java.lang.Object mechanism_;
3622      /**
3623       * <code>required string mechanism = 2;</code>
3624       */
3625      public boolean hasMechanism() {
3626        return ((bitField0_ & 0x00000002) == 0x00000002);
3627      }
3628      /**
3629       * <code>required string mechanism = 2;</code>
3630       */
3631      public java.lang.String getMechanism() {
3632        java.lang.Object ref = mechanism_;
3633        if (ref instanceof java.lang.String) {
3634          return (java.lang.String) ref;
3635        } else {
3636          com.google.protobuf.ByteString bs = 
3637              (com.google.protobuf.ByteString) ref;
3638          java.lang.String s = bs.toStringUtf8();
3639          if (bs.isValidUtf8()) {
3640            mechanism_ = s;
3641          }
3642          return s;
3643        }
3644      }
3645      /**
3646       * <code>required string mechanism = 2;</code>
3647       */
3648      public com.google.protobuf.ByteString
3649          getMechanismBytes() {
3650        java.lang.Object ref = mechanism_;
3651        if (ref instanceof java.lang.String) {
3652          com.google.protobuf.ByteString b = 
3653              com.google.protobuf.ByteString.copyFromUtf8(
3654                  (java.lang.String) ref);
3655          mechanism_ = b;
3656          return b;
3657        } else {
3658          return (com.google.protobuf.ByteString) ref;
3659        }
3660      }
3661
3662      // optional string protocol = 3;
3663      public static final int PROTOCOL_FIELD_NUMBER = 3;
3664      private java.lang.Object protocol_;
3665      /**
3666       * <code>optional string protocol = 3;</code>
3667       */
3668      public boolean hasProtocol() {
3669        return ((bitField0_ & 0x00000004) == 0x00000004);
3670      }
3671      /**
3672       * <code>optional string protocol = 3;</code>
3673       */
3674      public java.lang.String getProtocol() {
3675        java.lang.Object ref = protocol_;
3676        if (ref instanceof java.lang.String) {
3677          return (java.lang.String) ref;
3678        } else {
3679          com.google.protobuf.ByteString bs = 
3680              (com.google.protobuf.ByteString) ref;
3681          java.lang.String s = bs.toStringUtf8();
3682          if (bs.isValidUtf8()) {
3683            protocol_ = s;
3684          }
3685          return s;
3686        }
3687      }
3688      /**
3689       * <code>optional string protocol = 3;</code>
3690       */
3691      public com.google.protobuf.ByteString
3692          getProtocolBytes() {
3693        java.lang.Object ref = protocol_;
3694        if (ref instanceof java.lang.String) {
3695          com.google.protobuf.ByteString b = 
3696              com.google.protobuf.ByteString.copyFromUtf8(
3697                  (java.lang.String) ref);
3698          protocol_ = b;
3699          return b;
3700        } else {
3701          return (com.google.protobuf.ByteString) ref;
3702        }
3703      }
3704
3705      // optional string serverId = 4;
3706      public static final int SERVERID_FIELD_NUMBER = 4;
3707      private java.lang.Object serverId_;
3708      /**
3709       * <code>optional string serverId = 4;</code>
3710       */
3711      public boolean hasServerId() {
3712        return ((bitField0_ & 0x00000008) == 0x00000008);
3713      }
3714      /**
3715       * <code>optional string serverId = 4;</code>
3716       */
3717      public java.lang.String getServerId() {
3718        java.lang.Object ref = serverId_;
3719        if (ref instanceof java.lang.String) {
3720          return (java.lang.String) ref;
3721        } else {
3722          com.google.protobuf.ByteString bs = 
3723              (com.google.protobuf.ByteString) ref;
3724          java.lang.String s = bs.toStringUtf8();
3725          if (bs.isValidUtf8()) {
3726            serverId_ = s;
3727          }
3728          return s;
3729        }
3730      }
3731      /**
3732       * <code>optional string serverId = 4;</code>
3733       */
3734      public com.google.protobuf.ByteString
3735          getServerIdBytes() {
3736        java.lang.Object ref = serverId_;
3737        if (ref instanceof java.lang.String) {
3738          com.google.protobuf.ByteString b = 
3739              com.google.protobuf.ByteString.copyFromUtf8(
3740                  (java.lang.String) ref);
3741          serverId_ = b;
3742          return b;
3743        } else {
3744          return (com.google.protobuf.ByteString) ref;
3745        }
3746      }
3747
3748      // optional bytes challenge = 5;
3749      public static final int CHALLENGE_FIELD_NUMBER = 5;
3750      private com.google.protobuf.ByteString challenge_;
3751      /**
3752       * <code>optional bytes challenge = 5;</code>
3753       */
3754      public boolean hasChallenge() {
3755        return ((bitField0_ & 0x00000010) == 0x00000010);
3756      }
3757      /**
3758       * <code>optional bytes challenge = 5;</code>
3759       */
3760      public com.google.protobuf.ByteString getChallenge() {
3761        return challenge_;
3762      }
3763
3764      private void initFields() {
3765        method_ = "";
3766        mechanism_ = "";
3767        protocol_ = "";
3768        serverId_ = "";
3769        challenge_ = com.google.protobuf.ByteString.EMPTY;
3770      }
3771      private byte memoizedIsInitialized = -1;
3772      public final boolean isInitialized() {
3773        byte isInitialized = memoizedIsInitialized;
3774        if (isInitialized != -1) return isInitialized == 1;
3775
3776        if (!hasMethod()) {
3777          memoizedIsInitialized = 0;
3778          return false;
3779        }
3780        if (!hasMechanism()) {
3781          memoizedIsInitialized = 0;
3782          return false;
3783        }
3784        memoizedIsInitialized = 1;
3785        return true;
3786      }
3787
3788      public void writeTo(com.google.protobuf.CodedOutputStream output)
3789                          throws java.io.IOException {
3790        getSerializedSize();
3791        if (((bitField0_ & 0x00000001) == 0x00000001)) {
3792          output.writeBytes(1, getMethodBytes());
3793        }
3794        if (((bitField0_ & 0x00000002) == 0x00000002)) {
3795          output.writeBytes(2, getMechanismBytes());
3796        }
3797        if (((bitField0_ & 0x00000004) == 0x00000004)) {
3798          output.writeBytes(3, getProtocolBytes());
3799        }
3800        if (((bitField0_ & 0x00000008) == 0x00000008)) {
3801          output.writeBytes(4, getServerIdBytes());
3802        }
3803        if (((bitField0_ & 0x00000010) == 0x00000010)) {
3804          output.writeBytes(5, challenge_);
3805        }
3806        getUnknownFields().writeTo(output);
3807      }
3808
3809      private int memoizedSerializedSize = -1;
3810      public int getSerializedSize() {
3811        int size = memoizedSerializedSize;
3812        if (size != -1) return size;
3813
3814        size = 0;
3815        if (((bitField0_ & 0x00000001) == 0x00000001)) {
3816          size += com.google.protobuf.CodedOutputStream
3817            .computeBytesSize(1, getMethodBytes());
3818        }
3819        if (((bitField0_ & 0x00000002) == 0x00000002)) {
3820          size += com.google.protobuf.CodedOutputStream
3821            .computeBytesSize(2, getMechanismBytes());
3822        }
3823        if (((bitField0_ & 0x00000004) == 0x00000004)) {
3824          size += com.google.protobuf.CodedOutputStream
3825            .computeBytesSize(3, getProtocolBytes());
3826        }
3827        if (((bitField0_ & 0x00000008) == 0x00000008)) {
3828          size += com.google.protobuf.CodedOutputStream
3829            .computeBytesSize(4, getServerIdBytes());
3830        }
3831        if (((bitField0_ & 0x00000010) == 0x00000010)) {
3832          size += com.google.protobuf.CodedOutputStream
3833            .computeBytesSize(5, challenge_);
3834        }
3835        size += getUnknownFields().getSerializedSize();
3836        memoizedSerializedSize = size;
3837        return size;
3838      }
3839
3840      private static final long serialVersionUID = 0L;
3841      @java.lang.Override
3842      protected java.lang.Object writeReplace()
3843          throws java.io.ObjectStreamException {
3844        return super.writeReplace();
3845      }
3846
3847      @java.lang.Override
3848      public boolean equals(final java.lang.Object obj) {
3849        if (obj == this) {
          return true;
3851        }
3852        if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)) {
3853          return super.equals(obj);
3854        }
3855        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) obj;
3856
3857        boolean result = true;
3858        result = result && (hasMethod() == other.hasMethod());
3859        if (hasMethod()) {
3860          result = result && getMethod()
3861              .equals(other.getMethod());
3862        }
3863        result = result && (hasMechanism() == other.hasMechanism());
3864        if (hasMechanism()) {
3865          result = result && getMechanism()
3866              .equals(other.getMechanism());
3867        }
3868        result = result && (hasProtocol() == other.hasProtocol());
3869        if (hasProtocol()) {
3870          result = result && getProtocol()
3871              .equals(other.getProtocol());
3872        }
3873        result = result && (hasServerId() == other.hasServerId());
3874        if (hasServerId()) {
3875          result = result && getServerId()
3876              .equals(other.getServerId());
3877        }
3878        result = result && (hasChallenge() == other.hasChallenge());
3879        if (hasChallenge()) {
3880          result = result && getChallenge()
3881              .equals(other.getChallenge());
3882        }
3883        result = result &&
3884            getUnknownFields().equals(other.getUnknownFields());
3885        return result;
3886      }
3887
3888      private int memoizedHashCode = 0;
3889      @java.lang.Override
3890      public int hashCode() {
3891        if (memoizedHashCode != 0) {
3892          return memoizedHashCode;
3893        }
3894        int hash = 41;
3895        hash = (19 * hash) + getDescriptorForType().hashCode();
3896        if (hasMethod()) {
3897          hash = (37 * hash) + METHOD_FIELD_NUMBER;
3898          hash = (53 * hash) + getMethod().hashCode();
3899        }
3900        if (hasMechanism()) {
3901          hash = (37 * hash) + MECHANISM_FIELD_NUMBER;
3902          hash = (53 * hash) + getMechanism().hashCode();
3903        }
3904        if (hasProtocol()) {
3905          hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
3906          hash = (53 * hash) + getProtocol().hashCode();
3907        }
3908        if (hasServerId()) {
3909          hash = (37 * hash) + SERVERID_FIELD_NUMBER;
3910          hash = (53 * hash) + getServerId().hashCode();
3911        }
3912        if (hasChallenge()) {
3913          hash = (37 * hash) + CHALLENGE_FIELD_NUMBER;
3914          hash = (53 * hash) + getChallenge().hashCode();
3915        }
3916        hash = (29 * hash) + getUnknownFields().hashCode();
3917        memoizedHashCode = hash;
3918        return hash;
3919      }
3920
3921      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
3922          com.google.protobuf.ByteString data)
3923          throws com.google.protobuf.InvalidProtocolBufferException {
3924        return PARSER.parseFrom(data);
3925      }
3926      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
3927          com.google.protobuf.ByteString data,
3928          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3929          throws com.google.protobuf.InvalidProtocolBufferException {
3930        return PARSER.parseFrom(data, extensionRegistry);
3931      }
3932      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(byte[] data)
3933          throws com.google.protobuf.InvalidProtocolBufferException {
3934        return PARSER.parseFrom(data);
3935      }
3936      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
3937          byte[] data,
3938          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3939          throws com.google.protobuf.InvalidProtocolBufferException {
3940        return PARSER.parseFrom(data, extensionRegistry);
3941      }
3942      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(java.io.InputStream input)
3943          throws java.io.IOException {
3944        return PARSER.parseFrom(input);
3945      }
3946      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
3947          java.io.InputStream input,
3948          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3949          throws java.io.IOException {
3950        return PARSER.parseFrom(input, extensionRegistry);
3951      }
3952      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(java.io.InputStream input)
3953          throws java.io.IOException {
3954        return PARSER.parseDelimitedFrom(input);
3955      }
3956      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(
3957          java.io.InputStream input,
3958          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3959          throws java.io.IOException {
3960        return PARSER.parseDelimitedFrom(input, extensionRegistry);
3961      }
3962      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
3963          com.google.protobuf.CodedInputStream input)
3964          throws java.io.IOException {
3965        return PARSER.parseFrom(input);
3966      }
3967      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
3968          com.google.protobuf.CodedInputStream input,
3969          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3970          throws java.io.IOException {
3971        return PARSER.parseFrom(input, extensionRegistry);
3972      }
3973
3974      public static Builder newBuilder() { return Builder.create(); }
3975      public Builder newBuilderForType() { return newBuilder(); }
3976      public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth prototype) {
3977        return newBuilder().mergeFrom(prototype);
3978      }
3979      public Builder toBuilder() { return newBuilder(this); }
3980
3981      @java.lang.Override
3982      protected Builder newBuilderForType(
3983          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3984        Builder builder = new Builder(parent);
3985        return builder;
3986      }
3987      /**
3988       * Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
3989       */
3990      public static final class Builder extends
3991          com.google.protobuf.GeneratedMessage.Builder<Builder>
3992         implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder {
3993        public static final com.google.protobuf.Descriptors.Descriptor
3994            getDescriptor() {
3995          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
3996        }
3997
3998        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3999            internalGetFieldAccessorTable() {
4000          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
4001              .ensureFieldAccessorsInitialized(
4002                  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
4003        }
4004
4005        // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
4006        private Builder() {
4007          maybeForceBuilderInitialization();
4008        }
4009
4010        private Builder(
4011            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4012          super(parent);
4013          maybeForceBuilderInitialization();
4014        }
4015        private void maybeForceBuilderInitialization() {
4016          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4017          }
4018        }
4019        private static Builder create() {
4020          return new Builder();
4021        }
4022
4023        public Builder clear() {
4024          super.clear();
4025          method_ = "";
4026          bitField0_ = (bitField0_ & ~0x00000001);
4027          mechanism_ = "";
4028          bitField0_ = (bitField0_ & ~0x00000002);
4029          protocol_ = "";
4030          bitField0_ = (bitField0_ & ~0x00000004);
4031          serverId_ = "";
4032          bitField0_ = (bitField0_ & ~0x00000008);
4033          challenge_ = com.google.protobuf.ByteString.EMPTY;
4034          bitField0_ = (bitField0_ & ~0x00000010);
4035          return this;
4036        }
4037
4038        public Builder clone() {
4039          return create().mergeFrom(buildPartial());
4040        }
4041
4042        public com.google.protobuf.Descriptors.Descriptor
4043            getDescriptorForType() {
4044          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
4045        }
4046
4047        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstanceForType() {
4048          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance();
4049        }
4050
4051        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth build() {
4052          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = buildPartial();
4053          if (!result.isInitialized()) {
4054            throw newUninitializedMessageException(result);
4055          }
4056          return result;
4057        }
4058
4059        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth buildPartial() {
4060          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth(this);
4061          int from_bitField0_ = bitField0_;
4062          int to_bitField0_ = 0;
4063          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4064            to_bitField0_ |= 0x00000001;
4065          }
4066          result.method_ = method_;
4067          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4068            to_bitField0_ |= 0x00000002;
4069          }
4070          result.mechanism_ = mechanism_;
4071          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4072            to_bitField0_ |= 0x00000004;
4073          }
4074          result.protocol_ = protocol_;
4075          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
4076            to_bitField0_ |= 0x00000008;
4077          }
4078          result.serverId_ = serverId_;
4079          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
4080            to_bitField0_ |= 0x00000010;
4081          }
4082          result.challenge_ = challenge_;
4083          result.bitField0_ = to_bitField0_;
4084          onBuilt();
4085          return result;
4086        }
4087
4088        public Builder mergeFrom(com.google.protobuf.Message other) {
4089          if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) {
4090            return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)other);
4091          } else {
4092            super.mergeFrom(other);
4093            return this;
4094          }
4095        }
4096
4097        public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other) {
4098          if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance()) return this;
4099          if (other.hasMethod()) {
4100            bitField0_ |= 0x00000001;
4101            method_ = other.method_;
4102            onChanged();
4103          }
4104          if (other.hasMechanism()) {
4105            bitField0_ |= 0x00000002;
4106            mechanism_ = other.mechanism_;
4107            onChanged();
4108          }
4109          if (other.hasProtocol()) {
4110            bitField0_ |= 0x00000004;
4111            protocol_ = other.protocol_;
4112            onChanged();
4113          }
4114          if (other.hasServerId()) {
4115            bitField0_ |= 0x00000008;
4116            serverId_ = other.serverId_;
4117            onChanged();
4118          }
4119          if (other.hasChallenge()) {
4120            setChallenge(other.getChallenge());
4121          }
4122          this.mergeUnknownFields(other.getUnknownFields());
4123          return this;
4124        }
4125
4126        public final boolean isInitialized() {
4127          if (!hasMethod()) {
4128            
4129            return false;
4130          }
4131          if (!hasMechanism()) {
4132            
4133            return false;
4134          }
4135          return true;
4136        }
4137
4138        public Builder mergeFrom(
4139            com.google.protobuf.CodedInputStream input,
4140            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4141            throws java.io.IOException {
4142          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parsedMessage = null;
4143          try {
4144            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4145          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4146            parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) e.getUnfinishedMessage();
4147            throw e;
4148          } finally {
4149            if (parsedMessage != null) {
4150              mergeFrom(parsedMessage);
4151            }
4152          }
4153          return this;
4154        }
4155        private int bitField0_;
4156
4157        // required string method = 1;
4158        private java.lang.Object method_ = "";
4159        /**
4160         * <code>required string method = 1;</code>
4161         */
4162        public boolean hasMethod() {
4163          return ((bitField0_ & 0x00000001) == 0x00000001);
4164        }
4165        /**
4166         * <code>required string method = 1;</code>
4167         */
4168        public java.lang.String getMethod() {
4169          java.lang.Object ref = method_;
4170          if (!(ref instanceof java.lang.String)) {
4171            java.lang.String s = ((com.google.protobuf.ByteString) ref)
4172                .toStringUtf8();
4173            method_ = s;
4174            return s;
4175          } else {
4176            return (java.lang.String) ref;
4177          }
4178        }
4179        /**
4180         * <code>required string method = 1;</code>
4181         */
4182        public com.google.protobuf.ByteString
4183            getMethodBytes() {
4184          java.lang.Object ref = method_;
4185          if (ref instanceof String) {
4186            com.google.protobuf.ByteString b = 
4187                com.google.protobuf.ByteString.copyFromUtf8(
4188                    (java.lang.String) ref);
4189            method_ = b;
4190            return b;
4191          } else {
4192            return (com.google.protobuf.ByteString) ref;
4193          }
4194        }
4195        /**
4196         * <code>required string method = 1;</code>
4197         */
4198        public Builder setMethod(
4199            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
4204          method_ = value;
4205          onChanged();
4206          return this;
4207        }
4208        /**
4209         * <code>required string method = 1;</code>
4210         */
4211        public Builder clearMethod() {
4212          bitField0_ = (bitField0_ & ~0x00000001);
4213          method_ = getDefaultInstance().getMethod();
4214          onChanged();
4215          return this;
4216        }
4217        /**
4218         * <code>required string method = 1;</code>
4219         */
4220        public Builder setMethodBytes(
4221            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
4226          method_ = value;
4227          onChanged();
4228          return this;
4229        }
4230
4231        // required string mechanism = 2;
4232        private java.lang.Object mechanism_ = "";
4233        /**
4234         * <code>required string mechanism = 2;</code>
4235         */
4236        public boolean hasMechanism() {
4237          return ((bitField0_ & 0x00000002) == 0x00000002);
4238        }
4239        /**
4240         * <code>required string mechanism = 2;</code>
4241         */
4242        public java.lang.String getMechanism() {
4243          java.lang.Object ref = mechanism_;
4244          if (!(ref instanceof java.lang.String)) {
4245            java.lang.String s = ((com.google.protobuf.ByteString) ref)
4246                .toStringUtf8();
4247            mechanism_ = s;
4248            return s;
4249          } else {
4250            return (java.lang.String) ref;
4251          }
4252        }
4253        /**
4254         * <code>required string mechanism = 2;</code>
4255         */
4256        public com.google.protobuf.ByteString
4257            getMechanismBytes() {
4258          java.lang.Object ref = mechanism_;
4259          if (ref instanceof String) {
4260            com.google.protobuf.ByteString b = 
4261                com.google.protobuf.ByteString.copyFromUtf8(
4262                    (java.lang.String) ref);
4263            mechanism_ = b;
4264            return b;
4265          } else {
4266            return (com.google.protobuf.ByteString) ref;
4267          }
4268        }
4269        /**
4270         * <code>required string mechanism = 2;</code>
4271         */
4272        public Builder setMechanism(
4273            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
4278          mechanism_ = value;
4279          onChanged();
4280          return this;
4281        }
4282        /**
4283         * <code>required string mechanism = 2;</code>
4284         */
4285        public Builder clearMechanism() {
4286          bitField0_ = (bitField0_ & ~0x00000002);
4287          mechanism_ = getDefaultInstance().getMechanism();
4288          onChanged();
4289          return this;
4290        }
4291        /**
4292         * <code>required string mechanism = 2;</code>
4293         */
4294        public Builder setMechanismBytes(
4295            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
4300          mechanism_ = value;
4301          onChanged();
4302          return this;
4303        }
4304
4305        // optional string protocol = 3;
4306        private java.lang.Object protocol_ = "";
4307        /**
4308         * <code>optional string protocol = 3;</code>
4309         */
4310        public boolean hasProtocol() {
4311          return ((bitField0_ & 0x00000004) == 0x00000004);
4312        }
4313        /**
4314         * <code>optional string protocol = 3;</code>
4315         */
4316        public java.lang.String getProtocol() {
4317          java.lang.Object ref = protocol_;
4318          if (!(ref instanceof java.lang.String)) {
4319            java.lang.String s = ((com.google.protobuf.ByteString) ref)
4320                .toStringUtf8();
4321            protocol_ = s;
4322            return s;
4323          } else {
4324            return (java.lang.String) ref;
4325          }
4326        }
4327        /**
4328         * <code>optional string protocol = 3;</code>
4329         */
4330        public com.google.protobuf.ByteString
4331            getProtocolBytes() {
4332          java.lang.Object ref = protocol_;
4333          if (ref instanceof String) {
4334            com.google.protobuf.ByteString b = 
4335                com.google.protobuf.ByteString.copyFromUtf8(
4336                    (java.lang.String) ref);
4337            protocol_ = b;
4338            return b;
4339          } else {
4340            return (com.google.protobuf.ByteString) ref;
4341          }
4342        }
4343        /**
4344         * <code>optional string protocol = 3;</code>
4345         */
4346        public Builder setProtocol(
4347            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000004;
4352          protocol_ = value;
4353          onChanged();
4354          return this;
4355        }
4356        /**
4357         * <code>optional string protocol = 3;</code>
4358         */
4359        public Builder clearProtocol() {
4360          bitField0_ = (bitField0_ & ~0x00000004);
4361          protocol_ = getDefaultInstance().getProtocol();
4362          onChanged();
4363          return this;
4364        }
4365        /**
4366         * <code>optional string protocol = 3;</code>
4367         */
4368        public Builder setProtocolBytes(
4369            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000004;
4374          protocol_ = value;
4375          onChanged();
4376          return this;
4377        }
4378
4379        // optional string serverId = 4;
4380        private java.lang.Object serverId_ = "";
4381        /**
4382         * <code>optional string serverId = 4;</code>
4383         */
4384        public boolean hasServerId() {
4385          return ((bitField0_ & 0x00000008) == 0x00000008);
4386        }
4387        /**
4388         * <code>optional string serverId = 4;</code>
4389         */
4390        public java.lang.String getServerId() {
4391          java.lang.Object ref = serverId_;
4392          if (!(ref instanceof java.lang.String)) {
4393            java.lang.String s = ((com.google.protobuf.ByteString) ref)
4394                .toStringUtf8();
4395            serverId_ = s;
4396            return s;
4397          } else {
4398            return (java.lang.String) ref;
4399          }
4400        }
4401        /**
4402         * <code>optional string serverId = 4;</code>
4403         */
4404        public com.google.protobuf.ByteString
4405            getServerIdBytes() {
4406          java.lang.Object ref = serverId_;
4407          if (ref instanceof String) {
4408            com.google.protobuf.ByteString b = 
4409                com.google.protobuf.ByteString.copyFromUtf8(
4410                    (java.lang.String) ref);
4411            serverId_ = b;
4412            return b;
4413          } else {
4414            return (com.google.protobuf.ByteString) ref;
4415          }
4416        }
4417        /**
4418         * <code>optional string serverId = 4;</code>
4419         */
4420        public Builder setServerId(
4421            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000008;
4426          serverId_ = value;
4427          onChanged();
4428          return this;
4429        }
4430        /**
4431         * <code>optional string serverId = 4;</code>
4432         */
4433        public Builder clearServerId() {
4434          bitField0_ = (bitField0_ & ~0x00000008);
4435          serverId_ = getDefaultInstance().getServerId();
4436          onChanged();
4437          return this;
4438        }
4439        /**
4440         * <code>optional string serverId = 4;</code>
4441         */
4442        public Builder setServerIdBytes(
4443            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000008;
4448          serverId_ = value;
4449          onChanged();
4450          return this;
4451        }
4452
4453        // optional bytes challenge = 5;
4454        private com.google.protobuf.ByteString challenge_ = com.google.protobuf.ByteString.EMPTY;
4455        /**
4456         * <code>optional bytes challenge = 5;</code>
4457         */
4458        public boolean hasChallenge() {
4459          return ((bitField0_ & 0x00000010) == 0x00000010);
4460        }
4461        /**
4462         * <code>optional bytes challenge = 5;</code>
4463         */
4464        public com.google.protobuf.ByteString getChallenge() {
4465          return challenge_;
4466        }
4467        /**
4468         * <code>optional bytes challenge = 5;</code>
4469         */
4470        public Builder setChallenge(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000010;
4475          challenge_ = value;
4476          onChanged();
4477          return this;
4478        }
4479        /**
4480         * <code>optional bytes challenge = 5;</code>
4481         */
4482        public Builder clearChallenge() {
4483          bitField0_ = (bitField0_ & ~0x00000010);
4484          challenge_ = getDefaultInstance().getChallenge();
4485          onChanged();
4486          return this;
4487        }
4488
4489        // @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto.SaslAuth)
4490      }
4491
4492      static {
4493        defaultInstance = new SaslAuth(true);
4494        defaultInstance.initFields();
4495      }
4496
4497      // @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto.SaslAuth)
4498    }
4499
4500    private int bitField0_;
4501    // optional uint32 version = 1;
4502    public static final int VERSION_FIELD_NUMBER = 1;
4503    private int version_;
4504    /**
4505     * <code>optional uint32 version = 1;</code>
4506     */
4507    public boolean hasVersion() {
4508      return ((bitField0_ & 0x00000001) == 0x00000001);
4509    }
4510    /**
4511     * <code>optional uint32 version = 1;</code>
4512     */
4513    public int getVersion() {
4514      return version_;
4515    }
4516
4517    // required .hadoop.common.RpcSaslProto.SaslState state = 2;
4518    public static final int STATE_FIELD_NUMBER = 2;
4519    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_;
4520    /**
4521     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
4522     */
4523    public boolean hasState() {
4524      return ((bitField0_ & 0x00000002) == 0x00000002);
4525    }
4526    /**
4527     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
4528     */
4529    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
4530      return state_;
4531    }
4532
4533    // optional bytes token = 3;
4534    public static final int TOKEN_FIELD_NUMBER = 3;
4535    private com.google.protobuf.ByteString token_;
4536    /**
4537     * <code>optional bytes token = 3;</code>
4538     */
4539    public boolean hasToken() {
4540      return ((bitField0_ & 0x00000004) == 0x00000004);
4541    }
4542    /**
4543     * <code>optional bytes token = 3;</code>
4544     */
4545    public com.google.protobuf.ByteString getToken() {
4546      return token_;
4547    }
4548
4549    // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
4550    public static final int AUTHS_FIELD_NUMBER = 4;
4551    private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_;
4552    /**
4553     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
4554     */
4555    public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
4556      return auths_;
4557    }
4558    /**
4559     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
4560     */
4561    public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
4562        getAuthsOrBuilderList() {
4563      return auths_;
4564    }
4565    /**
4566     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
4567     */
4568    public int getAuthsCount() {
4569      return auths_.size();
4570    }
4571    /**
4572     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
4573     */
4574    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
4575      return auths_.get(index);
4576    }
4577    /**
4578     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
4579     */
4580    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
4581        int index) {
4582      return auths_.get(index);
4583    }
4584
4585    private void initFields() {
4586      version_ = 0;
4587      state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
4588      token_ = com.google.protobuf.ByteString.EMPTY;
4589      auths_ = java.util.Collections.emptyList();
4590    }
4591    private byte memoizedIsInitialized = -1;
4592    public final boolean isInitialized() {
4593      byte isInitialized = memoizedIsInitialized;
4594      if (isInitialized != -1) return isInitialized == 1;
4595
4596      if (!hasState()) {
4597        memoizedIsInitialized = 0;
4598        return false;
4599      }
4600      for (int i = 0; i < getAuthsCount(); i++) {
4601        if (!getAuths(i).isInitialized()) {
4602          memoizedIsInitialized = 0;
4603          return false;
4604        }
4605      }
4606      memoizedIsInitialized = 1;
4607      return true;
4608    }
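
    // isInitialized() mirrors the .proto contract: `state` is the only required
    // field of RpcSaslProto, and every repeated SaslAuth element must itself
    // carry its required fields. A sketch of a defensive check before sending
    // (the `msg` name is illustrative):
    //
    //   if (!msg.isInitialized()) {
    //     throw new IllegalStateException("RpcSaslProto missing required field: state");
    //   }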
4609
4610    public void writeTo(com.google.protobuf.CodedOutputStream output)
4611                        throws java.io.IOException {
4612      getSerializedSize();
4613      if (((bitField0_ & 0x00000001) == 0x00000001)) {
4614        output.writeUInt32(1, version_);
4615      }
4616      if (((bitField0_ & 0x00000002) == 0x00000002)) {
4617        output.writeEnum(2, state_.getNumber());
4618      }
4619      if (((bitField0_ & 0x00000004) == 0x00000004)) {
4620        output.writeBytes(3, token_);
4621      }
4622      for (int i = 0; i < auths_.size(); i++) {
4623        output.writeMessage(4, auths_.get(i));
4624      }
4625      getUnknownFields().writeTo(output);
4626    }
4627
4628    private int memoizedSerializedSize = -1;
4629    public int getSerializedSize() {
4630      int size = memoizedSerializedSize;
4631      if (size != -1) return size;
4632
4633      size = 0;
4634      if (((bitField0_ & 0x00000001) == 0x00000001)) {
4635        size += com.google.protobuf.CodedOutputStream
4636          .computeUInt32Size(1, version_);
4637      }
4638      if (((bitField0_ & 0x00000002) == 0x00000002)) {
4639        size += com.google.protobuf.CodedOutputStream
4640          .computeEnumSize(2, state_.getNumber());
4641      }
4642      if (((bitField0_ & 0x00000004) == 0x00000004)) {
4643        size += com.google.protobuf.CodedOutputStream
4644          .computeBytesSize(3, token_);
4645      }
4646      for (int i = 0; i < auths_.size(); i++) {
4647        size += com.google.protobuf.CodedOutputStream
4648          .computeMessageSize(4, auths_.get(i));
4649      }
4650      size += getUnknownFields().getSerializedSize();
4651      memoizedSerializedSize = size;
4652      return size;
4653    }
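
    // writeTo() calls getSerializedSize() first so the memoized size matches what
    // is written; fields are emitted in tag order (1=version, 2=state, 3=token,
    // 4=auths). A framing sketch using the stock length-delimited helper (the
    // `msg` and `out` names are illustrative):
    //
    //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    //   msg.writeDelimitedTo(out);  // varint length prefix, then the bytes above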
4654
4655    private static final long serialVersionUID = 0L;
4656    @java.lang.Override
4657    protected java.lang.Object writeReplace()
4658        throws java.io.ObjectStreamException {
4659      return super.writeReplace();
4660    }
4661
4662    @java.lang.Override
4663    public boolean equals(final java.lang.Object obj) {
4664      if (obj == this) {
4665        return true;
4666      }
4667      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)) {
4668        return super.equals(obj);
4669      }
4670      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) obj;
4671
4672      boolean result = true;
4673      result = result && (hasVersion() == other.hasVersion());
4674      if (hasVersion()) {
4675        result = result && (getVersion()
4676            == other.getVersion());
4677      }
4678      result = result && (hasState() == other.hasState());
4679      if (hasState()) {
4680        result = result &&
4681            (getState() == other.getState());
4682      }
4683      result = result && (hasToken() == other.hasToken());
4684      if (hasToken()) {
4685        result = result && getToken()
4686            .equals(other.getToken());
4687      }
4688      result = result && getAuthsList()
4689          .equals(other.getAuthsList());
4690      result = result &&
4691          getUnknownFields().equals(other.getUnknownFields());
4692      return result;
4693    }
4694
4695    private int memoizedHashCode = 0;
4696    @java.lang.Override
4697    public int hashCode() {
4698      if (memoizedHashCode != 0) {
4699        return memoizedHashCode;
4700      }
4701      int hash = 41;
4702      hash = (19 * hash) + getDescriptorForType().hashCode();
4703      if (hasVersion()) {
4704        hash = (37 * hash) + VERSION_FIELD_NUMBER;
4705        hash = (53 * hash) + getVersion();
4706      }
4707      if (hasState()) {
4708        hash = (37 * hash) + STATE_FIELD_NUMBER;
4709        hash = (53 * hash) + hashEnum(getState());
4710      }
4711      if (hasToken()) {
4712        hash = (37 * hash) + TOKEN_FIELD_NUMBER;
4713        hash = (53 * hash) + getToken().hashCode();
4714      }
4715      if (getAuthsCount() > 0) {
4716        hash = (37 * hash) + AUTHS_FIELD_NUMBER;
4717        hash = (53 * hash) + getAuthsList().hashCode();
4718      }
4719      hash = (29 * hash) + getUnknownFields().hashCode();
4720      memoizedHashCode = hash;
4721      return hash;
4722    }
4723
4724    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
4725        com.google.protobuf.ByteString data)
4726        throws com.google.protobuf.InvalidProtocolBufferException {
4727      return PARSER.parseFrom(data);
4728    }
4729    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
4730        com.google.protobuf.ByteString data,
4731        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4732        throws com.google.protobuf.InvalidProtocolBufferException {
4733      return PARSER.parseFrom(data, extensionRegistry);
4734    }
4735    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(byte[] data)
4736        throws com.google.protobuf.InvalidProtocolBufferException {
4737      return PARSER.parseFrom(data);
4738    }
4739    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
4740        byte[] data,
4741        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4742        throws com.google.protobuf.InvalidProtocolBufferException {
4743      return PARSER.parseFrom(data, extensionRegistry);
4744    }
4745    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(java.io.InputStream input)
4746        throws java.io.IOException {
4747      return PARSER.parseFrom(input);
4748    }
4749    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
4750        java.io.InputStream input,
4751        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4752        throws java.io.IOException {
4753      return PARSER.parseFrom(input, extensionRegistry);
4754    }
4755    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(java.io.InputStream input)
4756        throws java.io.IOException {
4757      return PARSER.parseDelimitedFrom(input);
4758    }
4759    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(
4760        java.io.InputStream input,
4761        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4762        throws java.io.IOException {
4763      return PARSER.parseDelimitedFrom(input, extensionRegistry);
4764    }
4765    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
4766        com.google.protobuf.CodedInputStream input)
4767        throws java.io.IOException {
4768      return PARSER.parseFrom(input);
4769    }
4770    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
4771        com.google.protobuf.CodedInputStream input,
4772        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4773        throws java.io.IOException {
4774      return PARSER.parseFrom(input, extensionRegistry);
4775    }
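
    // All of the parseFrom/parseDelimitedFrom overloads above delegate to the
    // static PARSER. A minimal round-trip sketch (names are illustrative):
    //
    //   byte[] wire = original.toByteArray();
    //   RpcSaslProto copy = RpcSaslProto.parseFrom(wire);
    //   assert copy.equals(original) && copy.hashCode() == original.hashCode();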
4776
4777    public static Builder newBuilder() { return Builder.create(); }
4778    public Builder newBuilderForType() { return newBuilder(); }
4779    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto prototype) {
4780      return newBuilder().mergeFrom(prototype);
4781    }
4782    public Builder toBuilder() { return newBuilder(this); }
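
    // toBuilder() copies this message into a mutable Builder, the usual way to
    // derive one SASL message from another. A sketch, assuming a hypothetical
    // `request` message and `challengeBytes` array:
    //
    //   RpcSaslProto reply = request.toBuilder()
    //       .setState(RpcSaslProto.SaslState.CHALLENGE)
    //       .setToken(com.google.protobuf.ByteString.copyFrom(challengeBytes))
    //       .build();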
4783
4784    @java.lang.Override
4785    protected Builder newBuilderForType(
4786        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4787      Builder builder = new Builder(parent);
4788      return builder;
4789    }
4790    /**
4791     * Protobuf type {@code hadoop.common.RpcSaslProto}
4792     */
4793    public static final class Builder extends
4794        com.google.protobuf.GeneratedMessage.Builder<Builder>
4795       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProtoOrBuilder {
4796      public static final com.google.protobuf.Descriptors.Descriptor
4797          getDescriptor() {
4798        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
4799      }
4800
4801      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4802          internalGetFieldAccessorTable() {
4803        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
4804            .ensureFieldAccessorsInitialized(
4805                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
4806      }
4807
4808      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.newBuilder()
4809      private Builder() {
4810        maybeForceBuilderInitialization();
4811      }
4812
4813      private Builder(
4814          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4815        super(parent);
4816        maybeForceBuilderInitialization();
4817      }
4818      private void maybeForceBuilderInitialization() {
4819        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4820          getAuthsFieldBuilder();
4821        }
4822      }
4823      private static Builder create() {
4824        return new Builder();
4825      }
4826
4827      public Builder clear() {
4828        super.clear();
4829        version_ = 0;
4830        bitField0_ = (bitField0_ & ~0x00000001);
4831        state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
4832        bitField0_ = (bitField0_ & ~0x00000002);
4833        token_ = com.google.protobuf.ByteString.EMPTY;
4834        bitField0_ = (bitField0_ & ~0x00000004);
4835        if (authsBuilder_ == null) {
4836          auths_ = java.util.Collections.emptyList();
4837          bitField0_ = (bitField0_ & ~0x00000008);
4838        } else {
4839          authsBuilder_.clear();
4840        }
4841        return this;
4842      }
4843
4844      public Builder clone() {
4845        return create().mergeFrom(buildPartial());
4846      }
4847
4848      public com.google.protobuf.Descriptors.Descriptor
4849          getDescriptorForType() {
4850        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
4851      }
4852
4853      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstanceForType() {
4854        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance();
4855      }
4856
4857      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto build() {
4858        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = buildPartial();
4859        if (!result.isInitialized()) {
4860          throw newUninitializedMessageException(result);
4861        }
4862        return result;
4863      }
4864
4865      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto buildPartial() {
4866        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto(this);
4867        int from_bitField0_ = bitField0_;
4868        int to_bitField0_ = 0;
4869        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4870          to_bitField0_ |= 0x00000001;
4871        }
4872        result.version_ = version_;
4873        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4874          to_bitField0_ |= 0x00000002;
4875        }
4876        result.state_ = state_;
4877        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4878          to_bitField0_ |= 0x00000004;
4879        }
4880        result.token_ = token_;
4881        if (authsBuilder_ == null) {
4882          if (((bitField0_ & 0x00000008) == 0x00000008)) {
4883            auths_ = java.util.Collections.unmodifiableList(auths_);
4884            bitField0_ = (bitField0_ & ~0x00000008);
4885          }
4886          result.auths_ = auths_;
4887        } else {
4888          result.auths_ = authsBuilder_.build();
4889        }
4890        result.bitField0_ = to_bitField0_;
4891        onBuilt();
4892        return result;
4893      }
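
      // build() (above) enforces the required-field check and throws when
      // `state` is unset, while buildPartial() skips the check. Illustrative
      // contrast only:
      //
      //   RpcSaslProto p = RpcSaslProto.newBuilder().buildPartial(); // allowed, but !p.isInitialized()
      //   RpcSaslProto q = RpcSaslProto.newBuilder().build();        // throws: required `state` missing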
4894
4895      public Builder mergeFrom(com.google.protobuf.Message other) {
4896        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) {
4897          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)other);
4898        } else {
4899          super.mergeFrom(other);
4900          return this;
4901        }
4902      }
4903
4904      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other) {
4905        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance()) return this;
4906        if (other.hasVersion()) {
4907          setVersion(other.getVersion());
4908        }
4909        if (other.hasState()) {
4910          setState(other.getState());
4911        }
4912        if (other.hasToken()) {
4913          setToken(other.getToken());
4914        }
4915        if (authsBuilder_ == null) {
4916          if (!other.auths_.isEmpty()) {
4917            if (auths_.isEmpty()) {
4918              auths_ = other.auths_;
4919              bitField0_ = (bitField0_ & ~0x00000008);
4920            } else {
4921              ensureAuthsIsMutable();
4922              auths_.addAll(other.auths_);
4923            }
4924            onChanged();
4925          }
4926        } else {
4927          if (!other.auths_.isEmpty()) {
4928            if (authsBuilder_.isEmpty()) {
4929              authsBuilder_.dispose();
4930              authsBuilder_ = null;
4931              auths_ = other.auths_;
4932              bitField0_ = (bitField0_ & ~0x00000008);
4933              authsBuilder_ = 
4934                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
4935                   getAuthsFieldBuilder() : null;
4936            } else {
4937              authsBuilder_.addAllMessages(other.auths_);
4938            }
4939          }
4940        }
4941        this.mergeUnknownFields(other.getUnknownFields());
4942        return this;
4943      }
4944
4945      public final boolean isInitialized() {
4946        if (!hasState()) {
4947          
4948          return false;
4949        }
4950        for (int i = 0; i < getAuthsCount(); i++) {
4951          if (!getAuths(i).isInitialized()) {
4952            
4953            return false;
4954          }
4955        }
4956        return true;
4957      }
4958
4959      public Builder mergeFrom(
4960          com.google.protobuf.CodedInputStream input,
4961          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4962          throws java.io.IOException {
4963        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parsedMessage = null;
4964        try {
4965          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4966        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4967          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) e.getUnfinishedMessage();
4968          throw e;
4969        } finally {
4970          if (parsedMessage != null) {
4971            mergeFrom(parsedMessage);
4972          }
4973        }
4974        return this;
4975      }
4976      private int bitField0_;
4977
4978      // optional uint32 version = 1;
4979      private int version_;
4980      /**
4981       * <code>optional uint32 version = 1;</code>
4982       */
4983      public boolean hasVersion() {
4984        return ((bitField0_ & 0x00000001) == 0x00000001);
4985      }
4986      /**
4987       * <code>optional uint32 version = 1;</code>
4988       */
4989      public int getVersion() {
4990        return version_;
4991      }
4992      /**
4993       * <code>optional uint32 version = 1;</code>
4994       */
4995      public Builder setVersion(int value) {
4996        bitField0_ |= 0x00000001;
4997        version_ = value;
4998        onChanged();
4999        return this;
5000      }
5001      /**
5002       * <code>optional uint32 version = 1;</code>
5003       */
5004      public Builder clearVersion() {
5005        bitField0_ = (bitField0_ & ~0x00000001);
5006        version_ = 0;
5007        onChanged();
5008        return this;
5009      }
5010
5011      // required .hadoop.common.RpcSaslProto.SaslState state = 2;
5012      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
5013      /**
5014       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
5015       */
5016      public boolean hasState() {
5017        return ((bitField0_ & 0x00000002) == 0x00000002);
5018      }
5019      /**
5020       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
5021       */
5022      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
5023        return state_;
5024      }
5025      /**
5026       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
5027       */
5028      public Builder setState(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value) {
5029        if (value == null) {
5030          throw new NullPointerException();
5031        }
5032        bitField0_ |= 0x00000002;
5033        state_ = value;
5034        onChanged();
5035        return this;
5036      }
5037      /**
5038       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
5039       */
5040      public Builder clearState() {
5041        bitField0_ = (bitField0_ & ~0x00000002);
5042        state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
5043        onChanged();
5044        return this;
5045      }
5046
5047      // optional bytes token = 3;
5048      private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY;
5049      /**
5050       * <code>optional bytes token = 3;</code>
5051       */
5052      public boolean hasToken() {
5053        return ((bitField0_ & 0x00000004) == 0x00000004);
5054      }
5055      /**
5056       * <code>optional bytes token = 3;</code>
5057       */
5058      public com.google.protobuf.ByteString getToken() {
5059        return token_;
5060      }
5061      /**
5062       * <code>optional bytes token = 3;</code>
5063       */
5064      public Builder setToken(com.google.protobuf.ByteString value) {
5065        if (value == null) {
5066          throw new NullPointerException();
5067        }
5068        bitField0_ |= 0x00000004;
5069        token_ = value;
5070        onChanged();
5071        return this;
5072      }
5073      /**
5074       * <code>optional bytes token = 3;</code>
5075       */
5076      public Builder clearToken() {
5077        bitField0_ = (bitField0_ & ~0x00000004);
5078        token_ = getDefaultInstance().getToken();
5079        onChanged();
5080        return this;
5081      }
5082
5083      // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
5084      private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_ =
5085        java.util.Collections.emptyList();
5086      private void ensureAuthsIsMutable() {
5087        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
5088          auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>(auths_);
5089          bitField0_ |= 0x00000008;
5090        }
5091      }
5092
5093      private com.google.protobuf.RepeatedFieldBuilder<
5094          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> authsBuilder_;
5095
5096      /**
5097       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5098       */
5099      public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
5100        if (authsBuilder_ == null) {
5101          return java.util.Collections.unmodifiableList(auths_);
5102        } else {
5103          return authsBuilder_.getMessageList();
5104        }
5105      }
5106      /**
5107       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5108       */
5109      public int getAuthsCount() {
5110        if (authsBuilder_ == null) {
5111          return auths_.size();
5112        } else {
5113          return authsBuilder_.getCount();
5114        }
5115      }
5116      /**
5117       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5118       */
5119      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
5120        if (authsBuilder_ == null) {
5121          return auths_.get(index);
5122        } else {
5123          return authsBuilder_.getMessage(index);
5124        }
5125      }
5126      /**
5127       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5128       */
5129      public Builder setAuths(
5130          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
5131        if (authsBuilder_ == null) {
5132          if (value == null) {
5133            throw new NullPointerException();
5134          }
5135          ensureAuthsIsMutable();
5136          auths_.set(index, value);
5137          onChanged();
5138        } else {
5139          authsBuilder_.setMessage(index, value);
5140        }
5141        return this;
5142      }
5143      /**
5144       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5145       */
5146      public Builder setAuths(
5147          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
5148        if (authsBuilder_ == null) {
5149          ensureAuthsIsMutable();
5150          auths_.set(index, builderForValue.build());
5151          onChanged();
5152        } else {
5153          authsBuilder_.setMessage(index, builderForValue.build());
5154        }
5155        return this;
5156      }
5157      /**
5158       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5159       */
5160      public Builder addAuths(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
5161        if (authsBuilder_ == null) {
5162          if (value == null) {
5163            throw new NullPointerException();
5164          }
5165          ensureAuthsIsMutable();
5166          auths_.add(value);
5167          onChanged();
5168        } else {
5169          authsBuilder_.addMessage(value);
5170        }
5171        return this;
5172      }
5173      /**
5174       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5175       */
5176      public Builder addAuths(
5177          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
5178        if (authsBuilder_ == null) {
5179          if (value == null) {
5180            throw new NullPointerException();
5181          }
5182          ensureAuthsIsMutable();
5183          auths_.add(index, value);
5184          onChanged();
5185        } else {
5186          authsBuilder_.addMessage(index, value);
5187        }
5188        return this;
5189      }
5190      /**
5191       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5192       */
5193      public Builder addAuths(
5194          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
5195        if (authsBuilder_ == null) {
5196          ensureAuthsIsMutable();
5197          auths_.add(builderForValue.build());
5198          onChanged();
5199        } else {
5200          authsBuilder_.addMessage(builderForValue.build());
5201        }
5202        return this;
5203      }
5204      /**
5205       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5206       */
5207      public Builder addAuths(
5208          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
5209        if (authsBuilder_ == null) {
5210          ensureAuthsIsMutable();
5211          auths_.add(index, builderForValue.build());
5212          onChanged();
5213        } else {
5214          authsBuilder_.addMessage(index, builderForValue.build());
5215        }
5216        return this;
5217      }
5218      /**
5219       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5220       */
5221      public Builder addAllAuths(
5222          java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> values) {
5223        if (authsBuilder_ == null) {
5224          ensureAuthsIsMutable();
5225          super.addAll(values, auths_);
5226          onChanged();
5227        } else {
5228          authsBuilder_.addAllMessages(values);
5229        }
5230        return this;
5231      }
5232      /**
5233       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5234       */
5235      public Builder clearAuths() {
5236        if (authsBuilder_ == null) {
5237          auths_ = java.util.Collections.emptyList();
5238          bitField0_ = (bitField0_ & ~0x00000008);
5239          onChanged();
5240        } else {
5241          authsBuilder_.clear();
5242        }
5243        return this;
5244      }
5245      /**
5246       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5247       */
5248      public Builder removeAuths(int index) {
5249        if (authsBuilder_ == null) {
5250          ensureAuthsIsMutable();
5251          auths_.remove(index);
5252          onChanged();
5253        } else {
5254          authsBuilder_.remove(index);
5255        }
5256        return this;
5257      }
5258      /**
5259       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5260       */
5261      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder getAuthsBuilder(
5262          int index) {
5263        return getAuthsFieldBuilder().getBuilder(index);
5264      }
5265      /**
5266       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5267       */
5268      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
5269          int index) {
5270        if (authsBuilder_ == null) {
5271          return auths_.get(index);  } else {
5272          return authsBuilder_.getMessageOrBuilder(index);
5273        }
5274      }
5275      /**
5276       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5277       */
5278      public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
5279           getAuthsOrBuilderList() {
5280        if (authsBuilder_ != null) {
5281          return authsBuilder_.getMessageOrBuilderList();
5282        } else {
5283          return java.util.Collections.unmodifiableList(auths_);
5284        }
5285      }
5286      /**
5287       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5288       */
5289      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder() {
5290        return getAuthsFieldBuilder().addBuilder(
5291            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
5292      }
5293      /**
5294       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5295       */
5296      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder(
5297          int index) {
5298        return getAuthsFieldBuilder().addBuilder(
5299            index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
5300      }
5301      /**
5302       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
5303       */
5304      public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder> 
5305           getAuthsBuilderList() {
5306        return getAuthsFieldBuilder().getBuilderList();
5307      }
5308      private com.google.protobuf.RepeatedFieldBuilder<
5309          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
5310          getAuthsFieldBuilder() {
5311        if (authsBuilder_ == null) {
5312          authsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5313              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>(
5314                  auths_,
5315                  ((bitField0_ & 0x00000008) == 0x00000008),
5316                  getParentForChildren(),
5317                  isClean());
5318          auths_ = null;
5319        }
5320        return authsBuilder_;
5321      }
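
      // The repeated `auths` field can be populated with prebuilt SaslAuth
      // messages (addAuths/addAllAuths) or edited in place through
      // addAuthsBuilder()/getAuthsBuilder(). A sketch of a server advertising
      // one authentication option (the literal values are illustrative):
      //
      //   RpcSaslProto offer = RpcSaslProto.newBuilder()
      //       .setState(RpcSaslProto.SaslState.NEGOTIATE)
      //       .addAuths(RpcSaslProto.SaslAuth.newBuilder()
      //           .setMethod("TOKEN")
      //           .setMechanism("DIGEST-MD5")
      //           .build())
      //       .build();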
5322
5323      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto)
5324    }
5325
5326    static {
5327      defaultInstance = new RpcSaslProto(true);
5328      defaultInstance.initFields();
5329    }
5330
5331    // @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto)
5332  }
5333
5334  private static com.google.protobuf.Descriptors.Descriptor
5335    internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
5336  private static
5337    com.google.protobuf.GeneratedMessage.FieldAccessorTable
5338      internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable;
5339  private static com.google.protobuf.Descriptors.Descriptor
5340    internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
5341  private static
5342    com.google.protobuf.GeneratedMessage.FieldAccessorTable
5343      internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
5344  private static com.google.protobuf.Descriptors.Descriptor
5345    internal_static_hadoop_common_RpcSaslProto_descriptor;
5346  private static
5347    com.google.protobuf.GeneratedMessage.FieldAccessorTable
5348      internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable;
5349  private static com.google.protobuf.Descriptors.Descriptor
5350    internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
5351  private static
5352    com.google.protobuf.GeneratedMessage.FieldAccessorTable
5353      internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable;
5354
5355  public static com.google.protobuf.Descriptors.FileDescriptor
5356      getDescriptor() {
5357    return descriptor;
5358  }
5359  private static com.google.protobuf.Descriptors.FileDescriptor
5360      descriptor;
5361  static {
5362    java.lang.String[] descriptorData = {
5363      "\n\017RpcHeader.proto\022\rhadoop.common\"\242\002\n\025Rpc" +
5364      "RequestHeaderProto\022,\n\007rpcKind\030\001 \001(\0162\033.ha" +
5365      "doop.common.RpcKindProto\022B\n\005rpcOp\030\002 \001(\0162" +
5366      "3.hadoop.common.RpcRequestHeaderProto.Op" +
5367      "erationProto\022\016\n\006callId\030\003 \002(\021\022\020\n\010clientId" +
5368      "\030\004 \002(\014\022\026\n\nretryCount\030\005 \001(\021:\002-1\"]\n\016Operat" +
5369      "ionProto\022\024\n\020RPC_FINAL_PACKET\020\000\022\033\n\027RPC_CO" +
5370      "NTINUATION_PACKET\020\001\022\030\n\024RPC_CLOSE_CONNECT" +
5371      "ION\020\002\"\312\005\n\026RpcResponseHeaderProto\022\016\n\006call" +
5372      "Id\030\001 \002(\r\022D\n\006status\030\002 \002(\01624.hadoop.common",
5373      ".RpcResponseHeaderProto.RpcStatusProto\022\033" +
5374      "\n\023serverIpcVersionNum\030\003 \001(\r\022\032\n\022exception" +
5375      "ClassName\030\004 \001(\t\022\020\n\010errorMsg\030\005 \001(\t\022L\n\013err" +
5376      "orDetail\030\006 \001(\01627.hadoop.common.RpcRespon" +
5377      "seHeaderProto.RpcErrorCodeProto\022\020\n\010clien" +
5378      "tId\030\007 \001(\014\022\026\n\nretryCount\030\010 \001(\021:\002-1\"3\n\016Rpc" +
5379      "StatusProto\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005F" +
5380      "ATAL\020\002\"\341\002\n\021RpcErrorCodeProto\022\025\n\021ERROR_AP" +
5381      "PLICATION\020\001\022\030\n\024ERROR_NO_SUCH_METHOD\020\002\022\032\n" +
5382      "\026ERROR_NO_SUCH_PROTOCOL\020\003\022\024\n\020ERROR_RPC_S",
5383      "ERVER\020\004\022\036\n\032ERROR_SERIALIZING_RESPONSE\020\005\022" +
5384      "\036\n\032ERROR_RPC_VERSION_MISMATCH\020\006\022\021\n\rFATAL" +
5385      "_UNKNOWN\020\n\022#\n\037FATAL_UNSUPPORTED_SERIALIZ" +
5386      "ATION\020\013\022\034\n\030FATAL_INVALID_RPC_HEADER\020\014\022\037\n" +
5387      "\033FATAL_DESERIALIZING_REQUEST\020\r\022\032\n\026FATAL_" +
5388      "VERSION_MISMATCH\020\016\022\026\n\022FATAL_UNAUTHORIZED" +
5389      "\020\017\"\335\002\n\014RpcSaslProto\022\017\n\007version\030\001 \001(\r\0224\n\005" +
5390      "state\030\002 \002(\0162%.hadoop.common.RpcSaslProto" +
5391      ".SaslState\022\r\n\005token\030\003 \001(\014\0223\n\005auths\030\004 \003(\013" +
5392      "2$.hadoop.common.RpcSaslProto.SaslAuth\032d",
5393      "\n\010SaslAuth\022\016\n\006method\030\001 \002(\t\022\021\n\tmechanism\030" +
5394      "\002 \002(\t\022\020\n\010protocol\030\003 \001(\t\022\020\n\010serverId\030\004 \001(" +
5395      "\t\022\021\n\tchallenge\030\005 \001(\014\"\\\n\tSaslState\022\013\n\007SUC" +
5396      "CESS\020\000\022\r\n\tNEGOTIATE\020\001\022\014\n\010INITIATE\020\002\022\r\n\tC" +
5397      "HALLENGE\020\003\022\014\n\010RESPONSE\020\004\022\010\n\004WRAP\020\005*J\n\014Rp" +
5398      "cKindProto\022\017\n\013RPC_BUILTIN\020\000\022\020\n\014RPC_WRITA" +
5399      "BLE\020\001\022\027\n\023RPC_PROTOCOL_BUFFER\020\002B4\n\036org.ap" +
5400      "ache.hadoop.ipc.protobufB\017RpcHeaderProto" +
5401      "s\240\001\001"
5402    };
5403    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
5404      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
5405        public com.google.protobuf.ExtensionRegistry assignDescriptors(
5406            com.google.protobuf.Descriptors.FileDescriptor root) {
5407          descriptor = root;
5408          internal_static_hadoop_common_RpcRequestHeaderProto_descriptor =
5409            getDescriptor().getMessageTypes().get(0);
5410          internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable = new
5411            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
5412              internal_static_hadoop_common_RpcRequestHeaderProto_descriptor,
5413              new java.lang.String[] { "RpcKind", "RpcOp", "CallId", "ClientId", "RetryCount", });
5414          internal_static_hadoop_common_RpcResponseHeaderProto_descriptor =
5415            getDescriptor().getMessageTypes().get(1);
5416          internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable = new
5417            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
5418              internal_static_hadoop_common_RpcResponseHeaderProto_descriptor,
5419              new java.lang.String[] { "CallId", "Status", "ServerIpcVersionNum", "ExceptionClassName", "ErrorMsg", "ErrorDetail", "ClientId", "RetryCount", });
5420          internal_static_hadoop_common_RpcSaslProto_descriptor =
5421            getDescriptor().getMessageTypes().get(2);
5422          internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable = new
5423            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
5424              internal_static_hadoop_common_RpcSaslProto_descriptor,
5425              new java.lang.String[] { "Version", "State", "Token", "Auths", });
5426          internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor =
5427            internal_static_hadoop_common_RpcSaslProto_descriptor.getNestedTypes().get(0);
5428          internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable = new
5429            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
5430              internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor,
5431              new java.lang.String[] { "Method", "Mechanism", "Protocol", "ServerId", "Challenge", });
5432          return null;
5433        }
5434      };
5435    com.google.protobuf.Descriptors.FileDescriptor
5436      .internalBuildGeneratedFileFrom(descriptorData,
5437        new com.google.protobuf.Descriptors.FileDescriptor[] {
5438        }, assigner);
5439  }
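
  // descriptorData above is the serialized FileDescriptorProto for
  // RpcHeader.proto, parsed when this class loads; the assigner then wires up
  // the per-message descriptors and field accessor tables. A sketch of runtime
  // introspection against the result:
  //
  //   com.google.protobuf.Descriptors.Descriptor d = RpcSaslProto.getDescriptor();
  //   for (com.google.protobuf.Descriptors.FieldDescriptor f : d.getFields()) {
  //     System.out.println(f.getNumber() + ": " + f.getName());
  //   }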
5440
5441  // @@protoc_insertion_point(outer_class_scope)
5442}