001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: RpcHeader.proto
003
004package org.apache.hadoop.ipc.protobuf;
005
006public final class RpcHeaderProtos {
  // Private constructor: this outer class is only a namespace for the
  // generated message and enum types and is never instantiated.
  private RpcHeaderProtos() {}
  // RpcHeader.proto declares no extensions, so there is nothing to add to
  // the registry; the empty method exists to satisfy the generated-code API.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
011  /**
012   * Protobuf enum {@code hadoop.common.RpcKindProto}
013   *
014   * <pre>
015   **
016   * RpcKind determine the rpcEngine and the serialization of the rpc request
017   * </pre>
018   */
019  public enum RpcKindProto
020      implements com.google.protobuf.ProtocolMessageEnum {
021    /**
022     * <code>RPC_BUILTIN = 0;</code>
023     *
024     * <pre>
025     * Used for built in calls by tests
026     * </pre>
027     */
028    RPC_BUILTIN(0, 0),
029    /**
030     * <code>RPC_WRITABLE = 1;</code>
031     *
032     * <pre>
033     * Use WritableRpcEngine 
034     * </pre>
035     */
036    RPC_WRITABLE(1, 1),
037    /**
038     * <code>RPC_PROTOCOL_BUFFER = 2;</code>
039     *
040     * <pre>
041     * Use ProtobufRpcEngine
042     * </pre>
043     */
044    RPC_PROTOCOL_BUFFER(2, 2),
045    ;
046
047    /**
048     * <code>RPC_BUILTIN = 0;</code>
049     *
050     * <pre>
051     * Used for built in calls by tests
052     * </pre>
053     */
054    public static final int RPC_BUILTIN_VALUE = 0;
055    /**
056     * <code>RPC_WRITABLE = 1;</code>
057     *
058     * <pre>
059     * Use WritableRpcEngine 
060     * </pre>
061     */
062    public static final int RPC_WRITABLE_VALUE = 1;
063    /**
064     * <code>RPC_PROTOCOL_BUFFER = 2;</code>
065     *
066     * <pre>
067     * Use ProtobufRpcEngine
068     * </pre>
069     */
070    public static final int RPC_PROTOCOL_BUFFER_VALUE = 2;
071
072
073    public final int getNumber() { return value; }
074
075    public static RpcKindProto valueOf(int value) {
076      switch (value) {
077        case 0: return RPC_BUILTIN;
078        case 1: return RPC_WRITABLE;
079        case 2: return RPC_PROTOCOL_BUFFER;
080        default: return null;
081      }
082    }
083
084    public static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
085        internalGetValueMap() {
086      return internalValueMap;
087    }
088    private static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
089        internalValueMap =
090          new com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>() {
091            public RpcKindProto findValueByNumber(int number) {
092              return RpcKindProto.valueOf(number);
093            }
094          };
095
096    public final com.google.protobuf.Descriptors.EnumValueDescriptor
097        getValueDescriptor() {
098      return getDescriptor().getValues().get(index);
099    }
100    public final com.google.protobuf.Descriptors.EnumDescriptor
101        getDescriptorForType() {
102      return getDescriptor();
103    }
104    public static final com.google.protobuf.Descriptors.EnumDescriptor
105        getDescriptor() {
106      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.getDescriptor().getEnumTypes().get(0);
107    }
108
109    private static final RpcKindProto[] VALUES = values();
110
111    public static RpcKindProto valueOf(
112        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
113      if (desc.getType() != getDescriptor()) {
114        throw new java.lang.IllegalArgumentException(
115          "EnumValueDescriptor is not for this type.");
116      }
117      return VALUES[desc.getIndex()];
118    }
119
120    private final int index;
121    private final int value;
122
123    private RpcKindProto(int index, int value) {
124      this.index = index;
125      this.value = value;
126    }
127
128    // @@protoc_insertion_point(enum_scope:hadoop.common.RpcKindProto)
129  }
130
  /**
   * Read-only accessors for the fields of {@code hadoop.common.RPCTraceInfoProto};
   * implemented by both the generated message class and its Builder.
   */
  public interface RPCTraceInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int64 traceId = 1;
    /**
     * <code>optional int64 traceId = 1;</code>
     *
     * <pre>
     * parentIdHigh
     * </pre>
     */
    boolean hasTraceId();
    /**
     * <code>optional int64 traceId = 1;</code>
     *
     * <pre>
     * parentIdHigh
     * </pre>
     */
    long getTraceId();

    // optional int64 parentId = 2;
    /**
     * <code>optional int64 parentId = 2;</code>
     *
     * <pre>
     * parentIdLow
     * </pre>
     */
    boolean hasParentId();
    /**
     * <code>optional int64 parentId = 2;</code>
     *
     * <pre>
     * parentIdLow
     * </pre>
     */
    long getParentId();
  }
170  /**
171   * Protobuf type {@code hadoop.common.RPCTraceInfoProto}
172   *
173   * <pre>
174   **
175   * Used to pass through the information necessary to continue
176   * a trace after an RPC is made. All we need is the traceid
177   * (so we know the overarching trace this message is a part of), and
178   * the id of the current span when this message was sent, so we know
179   * what span caused the new span we will create when this message is received.
180   * </pre>
181   */
182  public static final class RPCTraceInfoProto extends
183      com.google.protobuf.GeneratedMessage
184      implements RPCTraceInfoProtoOrBuilder {
185    // Use RPCTraceInfoProto.newBuilder() to construct.
186    private RPCTraceInfoProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
187      super(builder);
188      this.unknownFields = builder.getUnknownFields();
189    }
190    private RPCTraceInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
191
192    private static final RPCTraceInfoProto defaultInstance;
193    public static RPCTraceInfoProto getDefaultInstance() {
194      return defaultInstance;
195    }
196
197    public RPCTraceInfoProto getDefaultInstanceForType() {
198      return defaultInstance;
199    }
200
201    private final com.google.protobuf.UnknownFieldSet unknownFields;
202    @java.lang.Override
203    public final com.google.protobuf.UnknownFieldSet
204        getUnknownFields() {
205      return this.unknownFields;
206    }
207    private RPCTraceInfoProto(
208        com.google.protobuf.CodedInputStream input,
209        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
210        throws com.google.protobuf.InvalidProtocolBufferException {
211      initFields();
212      int mutable_bitField0_ = 0;
213      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
214          com.google.protobuf.UnknownFieldSet.newBuilder();
215      try {
216        boolean done = false;
217        while (!done) {
218          int tag = input.readTag();
219          switch (tag) {
220            case 0:
221              done = true;
222              break;
223            default: {
224              if (!parseUnknownField(input, unknownFields,
225                                     extensionRegistry, tag)) {
226                done = true;
227              }
228              break;
229            }
230            case 8: {
231              bitField0_ |= 0x00000001;
232              traceId_ = input.readInt64();
233              break;
234            }
235            case 16: {
236              bitField0_ |= 0x00000002;
237              parentId_ = input.readInt64();
238              break;
239            }
240          }
241        }
242      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
243        throw e.setUnfinishedMessage(this);
244      } catch (java.io.IOException e) {
245        throw new com.google.protobuf.InvalidProtocolBufferException(
246            e.getMessage()).setUnfinishedMessage(this);
247      } finally {
248        this.unknownFields = unknownFields.build();
249        makeExtensionsImmutable();
250      }
251    }
252    public static final com.google.protobuf.Descriptors.Descriptor
253        getDescriptor() {
254      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
255    }
256
257    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
258        internalGetFieldAccessorTable() {
259      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
260          .ensureFieldAccessorsInitialized(
261              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
262    }
263
264    public static com.google.protobuf.Parser<RPCTraceInfoProto> PARSER =
265        new com.google.protobuf.AbstractParser<RPCTraceInfoProto>() {
266      public RPCTraceInfoProto parsePartialFrom(
267          com.google.protobuf.CodedInputStream input,
268          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
269          throws com.google.protobuf.InvalidProtocolBufferException {
270        return new RPCTraceInfoProto(input, extensionRegistry);
271      }
272    };
273
274    @java.lang.Override
275    public com.google.protobuf.Parser<RPCTraceInfoProto> getParserForType() {
276      return PARSER;
277    }
278
279    private int bitField0_;
280    // optional int64 traceId = 1;
281    public static final int TRACEID_FIELD_NUMBER = 1;
282    private long traceId_;
283    /**
284     * <code>optional int64 traceId = 1;</code>
285     *
286     * <pre>
287     * parentIdHigh
288     * </pre>
289     */
290    public boolean hasTraceId() {
291      return ((bitField0_ & 0x00000001) == 0x00000001);
292    }
293    /**
294     * <code>optional int64 traceId = 1;</code>
295     *
296     * <pre>
297     * parentIdHigh
298     * </pre>
299     */
300    public long getTraceId() {
301      return traceId_;
302    }
303
304    // optional int64 parentId = 2;
305    public static final int PARENTID_FIELD_NUMBER = 2;
306    private long parentId_;
307    /**
308     * <code>optional int64 parentId = 2;</code>
309     *
310     * <pre>
311     * parentIdLow
312     * </pre>
313     */
314    public boolean hasParentId() {
315      return ((bitField0_ & 0x00000002) == 0x00000002);
316    }
317    /**
318     * <code>optional int64 parentId = 2;</code>
319     *
320     * <pre>
321     * parentIdLow
322     * </pre>
323     */
324    public long getParentId() {
325      return parentId_;
326    }
327
328    private void initFields() {
329      traceId_ = 0L;
330      parentId_ = 0L;
331    }
332    private byte memoizedIsInitialized = -1;
333    public final boolean isInitialized() {
334      byte isInitialized = memoizedIsInitialized;
335      if (isInitialized != -1) return isInitialized == 1;
336
337      memoizedIsInitialized = 1;
338      return true;
339    }
340
341    public void writeTo(com.google.protobuf.CodedOutputStream output)
342                        throws java.io.IOException {
343      getSerializedSize();
344      if (((bitField0_ & 0x00000001) == 0x00000001)) {
345        output.writeInt64(1, traceId_);
346      }
347      if (((bitField0_ & 0x00000002) == 0x00000002)) {
348        output.writeInt64(2, parentId_);
349      }
350      getUnknownFields().writeTo(output);
351    }
352
353    private int memoizedSerializedSize = -1;
354    public int getSerializedSize() {
355      int size = memoizedSerializedSize;
356      if (size != -1) return size;
357
358      size = 0;
359      if (((bitField0_ & 0x00000001) == 0x00000001)) {
360        size += com.google.protobuf.CodedOutputStream
361          .computeInt64Size(1, traceId_);
362      }
363      if (((bitField0_ & 0x00000002) == 0x00000002)) {
364        size += com.google.protobuf.CodedOutputStream
365          .computeInt64Size(2, parentId_);
366      }
367      size += getUnknownFields().getSerializedSize();
368      memoizedSerializedSize = size;
369      return size;
370    }
371
372    private static final long serialVersionUID = 0L;
373    @java.lang.Override
374    protected java.lang.Object writeReplace()
375        throws java.io.ObjectStreamException {
376      return super.writeReplace();
377    }
378
379    @java.lang.Override
380    public boolean equals(final java.lang.Object obj) {
381      if (obj == this) {
382       return true;
383      }
384      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)) {
385        return super.equals(obj);
386      }
387      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) obj;
388
389      boolean result = true;
390      result = result && (hasTraceId() == other.hasTraceId());
391      if (hasTraceId()) {
392        result = result && (getTraceId()
393            == other.getTraceId());
394      }
395      result = result && (hasParentId() == other.hasParentId());
396      if (hasParentId()) {
397        result = result && (getParentId()
398            == other.getParentId());
399      }
400      result = result &&
401          getUnknownFields().equals(other.getUnknownFields());
402      return result;
403    }
404
405    private int memoizedHashCode = 0;
406    @java.lang.Override
407    public int hashCode() {
408      if (memoizedHashCode != 0) {
409        return memoizedHashCode;
410      }
411      int hash = 41;
412      hash = (19 * hash) + getDescriptorForType().hashCode();
413      if (hasTraceId()) {
414        hash = (37 * hash) + TRACEID_FIELD_NUMBER;
415        hash = (53 * hash) + hashLong(getTraceId());
416      }
417      if (hasParentId()) {
418        hash = (37 * hash) + PARENTID_FIELD_NUMBER;
419        hash = (53 * hash) + hashLong(getParentId());
420      }
421      hash = (29 * hash) + getUnknownFields().hashCode();
422      memoizedHashCode = hash;
423      return hash;
424    }
425
426    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
427        com.google.protobuf.ByteString data)
428        throws com.google.protobuf.InvalidProtocolBufferException {
429      return PARSER.parseFrom(data);
430    }
431    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
432        com.google.protobuf.ByteString data,
433        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
434        throws com.google.protobuf.InvalidProtocolBufferException {
435      return PARSER.parseFrom(data, extensionRegistry);
436    }
437    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(byte[] data)
438        throws com.google.protobuf.InvalidProtocolBufferException {
439      return PARSER.parseFrom(data);
440    }
441    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
442        byte[] data,
443        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
444        throws com.google.protobuf.InvalidProtocolBufferException {
445      return PARSER.parseFrom(data, extensionRegistry);
446    }
447    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(java.io.InputStream input)
448        throws java.io.IOException {
449      return PARSER.parseFrom(input);
450    }
451    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
452        java.io.InputStream input,
453        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
454        throws java.io.IOException {
455      return PARSER.parseFrom(input, extensionRegistry);
456    }
457    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(java.io.InputStream input)
458        throws java.io.IOException {
459      return PARSER.parseDelimitedFrom(input);
460    }
461    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseDelimitedFrom(
462        java.io.InputStream input,
463        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
464        throws java.io.IOException {
465      return PARSER.parseDelimitedFrom(input, extensionRegistry);
466    }
467    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
468        com.google.protobuf.CodedInputStream input)
469        throws java.io.IOException {
470      return PARSER.parseFrom(input);
471    }
472    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parseFrom(
473        com.google.protobuf.CodedInputStream input,
474        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
475        throws java.io.IOException {
476      return PARSER.parseFrom(input, extensionRegistry);
477    }
478
479    public static Builder newBuilder() { return Builder.create(); }
480    public Builder newBuilderForType() { return newBuilder(); }
481    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto prototype) {
482      return newBuilder().mergeFrom(prototype);
483    }
484    public Builder toBuilder() { return newBuilder(this); }
485
486    @java.lang.Override
487    protected Builder newBuilderForType(
488        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
489      Builder builder = new Builder(parent);
490      return builder;
491    }
492    /**
493     * Protobuf type {@code hadoop.common.RPCTraceInfoProto}
494     *
495     * <pre>
496     **
497     * Used to pass through the information necessary to continue
498     * a trace after an RPC is made. All we need is the traceid
499     * (so we know the overarching trace this message is a part of), and
500     * the id of the current span when this message was sent, so we know
501     * what span caused the new span we will create when this message is received.
502     * </pre>
503     */
504    public static final class Builder extends
505        com.google.protobuf.GeneratedMessage.Builder<Builder>
506       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder {
507      public static final com.google.protobuf.Descriptors.Descriptor
508          getDescriptor() {
509        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
510      }
511
512      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
513          internalGetFieldAccessorTable() {
514        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable
515            .ensureFieldAccessorsInitialized(
516                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder.class);
517      }
518
519      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder()
520      private Builder() {
521        maybeForceBuilderInitialization();
522      }
523
524      private Builder(
525          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
526        super(parent);
527        maybeForceBuilderInitialization();
528      }
529      private void maybeForceBuilderInitialization() {
530        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
531        }
532      }
533      private static Builder create() {
534        return new Builder();
535      }
536
537      public Builder clear() {
538        super.clear();
539        traceId_ = 0L;
540        bitField0_ = (bitField0_ & ~0x00000001);
541        parentId_ = 0L;
542        bitField0_ = (bitField0_ & ~0x00000002);
543        return this;
544      }
545
546      public Builder clone() {
547        return create().mergeFrom(buildPartial());
548      }
549
550      public com.google.protobuf.Descriptors.Descriptor
551          getDescriptorForType() {
552        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
553      }
554
555      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getDefaultInstanceForType() {
556        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
557      }
558
559      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto build() {
560        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = buildPartial();
561        if (!result.isInitialized()) {
562          throw newUninitializedMessageException(result);
563        }
564        return result;
565      }
566
567      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto buildPartial() {
568        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto(this);
569        int from_bitField0_ = bitField0_;
570        int to_bitField0_ = 0;
571        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
572          to_bitField0_ |= 0x00000001;
573        }
574        result.traceId_ = traceId_;
575        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
576          to_bitField0_ |= 0x00000002;
577        }
578        result.parentId_ = parentId_;
579        result.bitField0_ = to_bitField0_;
580        onBuilt();
581        return result;
582      }
583
584      public Builder mergeFrom(com.google.protobuf.Message other) {
585        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) {
586          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto)other);
587        } else {
588          super.mergeFrom(other);
589          return this;
590        }
591      }
592
593      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto other) {
594        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) return this;
595        if (other.hasTraceId()) {
596          setTraceId(other.getTraceId());
597        }
598        if (other.hasParentId()) {
599          setParentId(other.getParentId());
600        }
601        this.mergeUnknownFields(other.getUnknownFields());
602        return this;
603      }
604
605      public final boolean isInitialized() {
606        return true;
607      }
608
609      public Builder mergeFrom(
610          com.google.protobuf.CodedInputStream input,
611          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
612          throws java.io.IOException {
613        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto parsedMessage = null;
614        try {
615          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
616        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
617          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto) e.getUnfinishedMessage();
618          throw e;
619        } finally {
620          if (parsedMessage != null) {
621            mergeFrom(parsedMessage);
622          }
623        }
624        return this;
625      }
626      private int bitField0_;
627
628      // optional int64 traceId = 1;
629      private long traceId_ ;
630      /**
631       * <code>optional int64 traceId = 1;</code>
632       *
633       * <pre>
634       * parentIdHigh
635       * </pre>
636       */
637      public boolean hasTraceId() {
638        return ((bitField0_ & 0x00000001) == 0x00000001);
639      }
640      /**
641       * <code>optional int64 traceId = 1;</code>
642       *
643       * <pre>
644       * parentIdHigh
645       * </pre>
646       */
647      public long getTraceId() {
648        return traceId_;
649      }
650      /**
651       * <code>optional int64 traceId = 1;</code>
652       *
653       * <pre>
654       * parentIdHigh
655       * </pre>
656       */
657      public Builder setTraceId(long value) {
658        bitField0_ |= 0x00000001;
659        traceId_ = value;
660        onChanged();
661        return this;
662      }
663      /**
664       * <code>optional int64 traceId = 1;</code>
665       *
666       * <pre>
667       * parentIdHigh
668       * </pre>
669       */
670      public Builder clearTraceId() {
671        bitField0_ = (bitField0_ & ~0x00000001);
672        traceId_ = 0L;
673        onChanged();
674        return this;
675      }
676
677      // optional int64 parentId = 2;
678      private long parentId_ ;
679      /**
680       * <code>optional int64 parentId = 2;</code>
681       *
682       * <pre>
683       * parentIdLow
684       * </pre>
685       */
686      public boolean hasParentId() {
687        return ((bitField0_ & 0x00000002) == 0x00000002);
688      }
689      /**
690       * <code>optional int64 parentId = 2;</code>
691       *
692       * <pre>
693       * parentIdLow
694       * </pre>
695       */
696      public long getParentId() {
697        return parentId_;
698      }
699      /**
700       * <code>optional int64 parentId = 2;</code>
701       *
702       * <pre>
703       * parentIdLow
704       * </pre>
705       */
706      public Builder setParentId(long value) {
707        bitField0_ |= 0x00000002;
708        parentId_ = value;
709        onChanged();
710        return this;
711      }
712      /**
713       * <code>optional int64 parentId = 2;</code>
714       *
715       * <pre>
716       * parentIdLow
717       * </pre>
718       */
719      public Builder clearParentId() {
720        bitField0_ = (bitField0_ & ~0x00000002);
721        parentId_ = 0L;
722        onChanged();
723        return this;
724      }
725
726      // @@protoc_insertion_point(builder_scope:hadoop.common.RPCTraceInfoProto)
727    }
728
729    static {
730      defaultInstance = new RPCTraceInfoProto(true);
731      defaultInstance.initFields();
732    }
733
734    // @@protoc_insertion_point(class_scope:hadoop.common.RPCTraceInfoProto)
735  }
736
  /**
   * Read-only accessors for the fields of {@code hadoop.common.RPCCallerContextProto};
   * implemented by both the generated message class and its Builder.
   */
  public interface RPCCallerContextProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string context = 1;
    /**
     * <code>required string context = 1;</code>
     */
    boolean hasContext();
    /**
     * <code>required string context = 1;</code>
     */
    java.lang.String getContext();
    /**
     * <code>required string context = 1;</code>
     */
    com.google.protobuf.ByteString
        getContextBytes();

    // optional bytes signature = 2;
    /**
     * <code>optional bytes signature = 2;</code>
     */
    boolean hasSignature();
    /**
     * <code>optional bytes signature = 2;</code>
     */
    com.google.protobuf.ByteString getSignature();
  }
765  /**
766   * Protobuf type {@code hadoop.common.RPCCallerContextProto}
767   *
768   * <pre>
769   **
770   * Used to pass through the call context entry after an RPC is made.
771   * </pre>
772   */
773  public static final class RPCCallerContextProto extends
774      com.google.protobuf.GeneratedMessage
775      implements RPCCallerContextProtoOrBuilder {
    // Use RPCCallerContextProto.newBuilder() to construct.
    private RPCCallerContextProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only to create the defaultInstance singleton.
    private RPCCallerContextProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final RPCCallerContextProto defaultInstance;
    public static RPCCallerContextProto getDefaultInstance() {
      return defaultInstance;
    }

    public RPCCallerContextProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this class does not recognize are kept
    // here and written back out verbatim by writeTo().
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: eagerly reads the entire message from the stream,
    // setting presence bits as fields are encountered.
    private RPCCallerContextProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            // A tag of 0 signals the end of the input.
            case 0:
              done = true;
              break;
            // Unrecognized tags are preserved as unknown fields.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 10 = field 1 (context), wire type 2 (length-delimited);
            // stored as raw bytes and decoded lazily by getContext().
            case 10: {
              bitField0_ |= 0x00000001;
              context_ = input.readBytes();
              break;
            }
            // tag 18 = field 2 (signature), wire type 2 (length-delimited).
            case 18: {
              bitField0_ |= 0x00000002;
              signature_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
    }

    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder.class);
    }

    // Parser that delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<RPCCallerContextProto> PARSER =
        new com.google.protobuf.AbstractParser<RPCCallerContextProto>() {
      public RPCCallerContextProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RPCCallerContextProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RPCCallerContextProto> getParserForType() {
      return PARSER;
    }
869
    // Presence bits for optional/required fields (bit 0x1 = context,
    // bit 0x2 = signature).
    private int bitField0_;
    // required string context = 1;
    public static final int CONTEXT_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a com.google.protobuf.ByteString;
    // converted lazily and cached by the accessors below.
    private java.lang.Object context_;
    /**
     * <code>required string context = 1;</code>
     */
    public boolean hasContext() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string context = 1;</code>
     */
    public java.lang.String getContext() {
      java.lang.Object ref = context_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so
        // getContextBytes() can still return the original raw bytes otherwise.
        if (bs.isValidUtf8()) {
          context_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string context = 1;</code>
     */
    public com.google.protobuf.ByteString
        getContextBytes() {
      java.lang.Object ref = context_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form for subsequent calls.
        context_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
913
    // optional bytes signature = 2;
    public static final int SIGNATURE_FIELD_NUMBER = 2;
    // Raw signature bytes; defaults to ByteString.EMPTY (see initFields()).
    private com.google.protobuf.ByteString signature_;
    /**
     * <code>optional bytes signature = 2;</code>
     */
    public boolean hasSignature() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes signature = 2;</code>
     */
    public com.google.protobuf.ByteString getSignature() {
      return signature_;
    }
929
    // Sets every field to its proto default value.
    private void initFields() {
      context_ = "";
      signature_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'context' is the only required field in this message.
      if (!hasContext()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
946
    // Serializes set fields (checked via presence bits) plus any unknown
    // fields to the output stream.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect: computes and memoizes the size first.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getContextBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, signature_);
      }
      getUnknownFields().writeTo(output);
    }
958
    // Memoized wire size in bytes; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum the encoded sizes of all set fields plus unknown fields.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getContextBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, signature_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
977
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to the superclass's replacement
    // object rather than serializing this instance directly.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
984
    /**
     * Field-by-field equality: presence flags, field values, and unknown
     * fields must all match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) obj;

      boolean result = true;
      result = result && (hasContext() == other.hasContext());
      if (hasContext()) {
        result = result && getContext()
            .equals(other.getContext());
      }
      result = result && (hasSignature() == other.hasSignature());
      if (hasSignature()) {
        result = result && getSignature()
            .equals(other.getSignature());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1010
    // Cached hash code; 0 is the sentinel for "not yet computed".
    private int memoizedHashCode = 0;
    /**
     * Hash over the descriptor, each set field (tagged with its field
     * number), and the unknown fields; memoized after first computation.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasContext()) {
        hash = (37 * hash) + CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getContext().hashCode();
      }
      if (hasSignature()) {
        hash = (37 * hash) + SIGNATURE_FIELD_NUMBER;
        hash = (53 * hash) + getSignature().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1031
    // Static parsing entry points for every supported input form
    // (ByteString, byte[], InputStream, CodedInputStream, with and without
    // an extension registry); all delegate to PARSER.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1084
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent for change notifications.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1098    /**
1099     * Protobuf type {@code hadoop.common.RPCCallerContextProto}
1100     *
1101     * <pre>
1102     **
1103     * Used to pass through the call context entry after an RPC is made.
1104     * </pre>
1105     */
1106    public static final class Builder extends
1107        com.google.protobuf.GeneratedMessage.Builder<Builder>
1108       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder {
1109      public static final com.google.protobuf.Descriptors.Descriptor
1110          getDescriptor() {
1111        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
1112      }
1113
1114      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1115          internalGetFieldAccessorTable() {
1116        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable
1117            .ensureFieldAccessorsInitialized(
1118                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder.class);
1119      }
1120
1121      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.newBuilder()
1122      private Builder() {
1123        maybeForceBuilderInitialization();
1124      }
1125
1126      private Builder(
1127          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1128        super(parent);
1129        maybeForceBuilderInitialization();
1130      }
1131      private void maybeForceBuilderInitialization() {
1132        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1133        }
1134      }
1135      private static Builder create() {
1136        return new Builder();
1137      }
1138
1139      public Builder clear() {
1140        super.clear();
1141        context_ = "";
1142        bitField0_ = (bitField0_ & ~0x00000001);
1143        signature_ = com.google.protobuf.ByteString.EMPTY;
1144        bitField0_ = (bitField0_ & ~0x00000002);
1145        return this;
1146      }
1147
1148      public Builder clone() {
1149        return create().mergeFrom(buildPartial());
1150      }
1151
1152      public com.google.protobuf.Descriptors.Descriptor
1153          getDescriptorForType() {
1154        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RPCCallerContextProto_descriptor;
1155      }
1156
1157      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getDefaultInstanceForType() {
1158        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
1159      }
1160
1161      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto build() {
1162        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result = buildPartial();
1163        if (!result.isInitialized()) {
1164          throw newUninitializedMessageException(result);
1165        }
1166        return result;
1167      }
1168
1169      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto buildPartial() {
1170        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto(this);
1171        int from_bitField0_ = bitField0_;
1172        int to_bitField0_ = 0;
1173        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1174          to_bitField0_ |= 0x00000001;
1175        }
1176        result.context_ = context_;
1177        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1178          to_bitField0_ |= 0x00000002;
1179        }
1180        result.signature_ = signature_;
1181        result.bitField0_ = to_bitField0_;
1182        onBuilt();
1183        return result;
1184      }
1185
1186      public Builder mergeFrom(com.google.protobuf.Message other) {
1187        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) {
1188          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto)other);
1189        } else {
1190          super.mergeFrom(other);
1191          return this;
1192        }
1193      }
1194
1195      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto other) {
1196        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance()) return this;
1197        if (other.hasContext()) {
1198          bitField0_ |= 0x00000001;
1199          context_ = other.context_;
1200          onChanged();
1201        }
1202        if (other.hasSignature()) {
1203          setSignature(other.getSignature());
1204        }
1205        this.mergeUnknownFields(other.getUnknownFields());
1206        return this;
1207      }
1208
1209      public final boolean isInitialized() {
1210        if (!hasContext()) {
1211          
1212          return false;
1213        }
1214        return true;
1215      }
1216
1217      public Builder mergeFrom(
1218          com.google.protobuf.CodedInputStream input,
1219          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1220          throws java.io.IOException {
1221        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto parsedMessage = null;
1222        try {
1223          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1224        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1225          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto) e.getUnfinishedMessage();
1226          throw e;
1227        } finally {
1228          if (parsedMessage != null) {
1229            mergeFrom(parsedMessage);
1230          }
1231        }
1232        return this;
1233      }
1234      private int bitField0_;
1235
1236      // required string context = 1;
1237      private java.lang.Object context_ = "";
1238      /**
1239       * <code>required string context = 1;</code>
1240       */
1241      public boolean hasContext() {
1242        return ((bitField0_ & 0x00000001) == 0x00000001);
1243      }
1244      /**
1245       * <code>required string context = 1;</code>
1246       */
1247      public java.lang.String getContext() {
1248        java.lang.Object ref = context_;
1249        if (!(ref instanceof java.lang.String)) {
1250          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1251              .toStringUtf8();
1252          context_ = s;
1253          return s;
1254        } else {
1255          return (java.lang.String) ref;
1256        }
1257      }
1258      /**
1259       * <code>required string context = 1;</code>
1260       */
1261      public com.google.protobuf.ByteString
1262          getContextBytes() {
1263        java.lang.Object ref = context_;
1264        if (ref instanceof String) {
1265          com.google.protobuf.ByteString b = 
1266              com.google.protobuf.ByteString.copyFromUtf8(
1267                  (java.lang.String) ref);
1268          context_ = b;
1269          return b;
1270        } else {
1271          return (com.google.protobuf.ByteString) ref;
1272        }
1273      }
1274      /**
1275       * <code>required string context = 1;</code>
1276       */
1277      public Builder setContext(
1278          java.lang.String value) {
1279        if (value == null) {
1280    throw new NullPointerException();
1281  }
1282  bitField0_ |= 0x00000001;
1283        context_ = value;
1284        onChanged();
1285        return this;
1286      }
1287      /**
1288       * <code>required string context = 1;</code>
1289       */
1290      public Builder clearContext() {
1291        bitField0_ = (bitField0_ & ~0x00000001);
1292        context_ = getDefaultInstance().getContext();
1293        onChanged();
1294        return this;
1295      }
1296      /**
1297       * <code>required string context = 1;</code>
1298       */
1299      public Builder setContextBytes(
1300          com.google.protobuf.ByteString value) {
1301        if (value == null) {
1302    throw new NullPointerException();
1303  }
1304  bitField0_ |= 0x00000001;
1305        context_ = value;
1306        onChanged();
1307        return this;
1308      }
1309
1310      // optional bytes signature = 2;
1311      private com.google.protobuf.ByteString signature_ = com.google.protobuf.ByteString.EMPTY;
1312      /**
1313       * <code>optional bytes signature = 2;</code>
1314       */
1315      public boolean hasSignature() {
1316        return ((bitField0_ & 0x00000002) == 0x00000002);
1317      }
1318      /**
1319       * <code>optional bytes signature = 2;</code>
1320       */
1321      public com.google.protobuf.ByteString getSignature() {
1322        return signature_;
1323      }
1324      /**
1325       * <code>optional bytes signature = 2;</code>
1326       */
1327      public Builder setSignature(com.google.protobuf.ByteString value) {
1328        if (value == null) {
1329    throw new NullPointerException();
1330  }
1331  bitField0_ |= 0x00000002;
1332        signature_ = value;
1333        onChanged();
1334        return this;
1335      }
1336      /**
1337       * <code>optional bytes signature = 2;</code>
1338       */
1339      public Builder clearSignature() {
1340        bitField0_ = (bitField0_ & ~0x00000002);
1341        signature_ = getDefaultInstance().getSignature();
1342        onChanged();
1343        return this;
1344      }
1345
1346      // @@protoc_insertion_point(builder_scope:hadoop.common.RPCCallerContextProto)
1347    }
1348
    static {
      // Eagerly create the type's default instance with default field values.
      defaultInstance = new RPCCallerContextProto(true);
      defaultInstance.initFields();
    }
1353
1354    // @@protoc_insertion_point(class_scope:hadoop.common.RPCCallerContextProto)
1355  }
1356
  /**
   * Read-only accessor interface for {@code hadoop.common.RpcRequestHeaderProto}:
   * one has/get pair per proto field, plus OrBuilder getters for message
   * fields. Implemented by {@code RpcRequestHeaderProto}.
   */
  public interface RpcRequestHeaderProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.common.RpcKindProto rpcKind = 1;
    /**
     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
     */
    boolean hasRpcKind();
    /**
     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind();

    // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
    /**
     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
     */
    boolean hasRpcOp();
    /**
     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp();

    // required sint32 callId = 3;
    /**
     * <code>required sint32 callId = 3;</code>
     *
     * <pre>
     * a sequence number that is sent back in response
     * </pre>
     */
    boolean hasCallId();
    /**
     * <code>required sint32 callId = 3;</code>
     *
     * <pre>
     * a sequence number that is sent back in response
     * </pre>
     */
    int getCallId();

    // required bytes clientId = 4;
    /**
     * <code>required bytes clientId = 4;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    boolean hasClientId();
    /**
     * <code>required bytes clientId = 4;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    com.google.protobuf.ByteString getClientId();

    // optional sint32 retryCount = 5 [default = -1];
    /**
     * <code>optional sint32 retryCount = 5 [default = -1];</code>
     *
     * <pre>
     * clientId + callId uniquely identifies a request
     * retry count, 1 means this is the first retry
     * </pre>
     */
    boolean hasRetryCount();
    /**
     * <code>optional sint32 retryCount = 5 [default = -1];</code>
     *
     * <pre>
     * clientId + callId uniquely identifies a request
     * retry count, 1 means this is the first retry
     * </pre>
     */
    int getRetryCount();

    // optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    boolean hasTraceInfo();
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo();
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder();

    // optional .hadoop.common.RPCCallerContextProto callerContext = 7;
    /**
     * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
     *
     * <pre>
     * call context
     * </pre>
     */
    boolean hasCallerContext();
    /**
     * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
     *
     * <pre>
     * call context
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext();
    /**
     * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
     *
     * <pre>
     * call context
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder();
  }
1488  /**
1489   * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
1490   *
1491   * <pre>
1492   * the header for the RpcRequest
1493   * </pre>
1494   */
1495  public static final class RpcRequestHeaderProto extends
1496      com.google.protobuf.GeneratedMessage
1497      implements RpcRequestHeaderProtoOrBuilder {
    // Use RpcRequestHeaderProto.newBuilder() to construct.
    private RpcRequestHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructs without running initFields(); unknown fields are set to the shared empty set.
    private RpcRequestHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1504
    private static final RpcRequestHeaderProto defaultInstance;
    /** Returns the shared default instance of this message type. */
    public static RpcRequestHeaderProto getDefaultInstance() {
      return defaultInstance;
    }

    public RpcRequestHeaderProto getDefaultInstanceForType() {
      return defaultInstance;
    }
1513
    // Fields read off the wire that this schema version does not recognize;
    // populated by the parsing constructor and preserved on reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a message from {@code input}, reading tag-by-tag until
     * end-of-stream (tag 0). Recognized fields set their presence bit in
     * bitField0_; unrecognized tags and out-of-range enum values are
     * preserved in unknownFields, which is always built in the finally
     * block even when parsing fails partway.
     */
    private RpcRequestHeaderProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: default precedes the numbered cases; legal in Java —
          // case order in a switch does not affect dispatch.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (rpcKind, enum): unknown values go to unknownFields.
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                rpcKind_ = value;
              }
              break;
            }
            case 16: {
              // Field 2 (rpcOp, enum): same unknown-value handling.
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                rpcOp_ = value;
              }
              break;
            }
            case 24: {
              // Field 3 (callId, sint32).
              bitField0_ |= 0x00000004;
              callId_ = input.readSInt32();
              break;
            }
            case 34: {
              // Field 4 (clientId, bytes).
              bitField0_ |= 0x00000008;
              clientId_ = input.readBytes();
              break;
            }
            case 40: {
              // Field 5 (retryCount, sint32).
              bitField0_ |= 0x00000010;
              retryCount_ = input.readSInt32();
              break;
            }
            case 50: {
              // Field 6 (traceInfo, message): merge into any prior value.
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000020) == 0x00000020)) {
                subBuilder = traceInfo_.toBuilder();
              }
              traceInfo_ = input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(traceInfo_);
                traceInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000020;
              break;
            }
            case 58: {
              // Field 7 (callerContext, message): merge into any prior value.
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000040) == 0x00000040)) {
                subBuilder = callerContext_.toBuilder();
              }
              callerContext_ = input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(callerContext_);
                callerContext_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000040;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: type descriptor and field-accessor table, both
    // backed by statics defined elsewhere in RpcHeaderProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
    }
1629
    /**
     * Wire parser for {@code RpcRequestHeaderProto}; delegates to the
     * message's stream-parsing constructor.
     * NOTE(review): public and non-final is the protoc 2.5 generated
     * pattern — do not reassign.
     */
    public static com.google.protobuf.Parser<RpcRequestHeaderProto> PARSER =
        new com.google.protobuf.AbstractParser<RpcRequestHeaderProto>() {
      public RpcRequestHeaderProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RpcRequestHeaderProto(input, extensionRegistry);
      }
    };

    /** Returns the shared {@code PARSER} instance for this message type. */
    @java.lang.Override
    public com.google.protobuf.Parser<RpcRequestHeaderProto> getParserForType() {
      return PARSER;
    }
1644
1645    /**
1646     * Protobuf enum {@code hadoop.common.RpcRequestHeaderProto.OperationProto}
1647     */
    public enum OperationProto
        implements com.google.protobuf.ProtocolMessageEnum {
      /**
       * <code>RPC_FINAL_PACKET = 0;</code>
       *
       * <pre>
       * The final RPC Packet
       * </pre>
       */
      RPC_FINAL_PACKET(0, 0),
      /**
       * <code>RPC_CONTINUATION_PACKET = 1;</code>
       *
       * <pre>
       * not implemented yet
       * </pre>
       */
      RPC_CONTINUATION_PACKET(1, 1),
      /**
       * <code>RPC_CLOSE_CONNECTION = 2;</code>
       *
       * <pre>
       * close the rpc connection
       * </pre>
       */
      RPC_CLOSE_CONNECTION(2, 2),
      ;

      /**
       * <code>RPC_FINAL_PACKET = 0;</code>
       *
       * <pre>
       * The final RPC Packet
       * </pre>
       */
      public static final int RPC_FINAL_PACKET_VALUE = 0;
      /**
       * <code>RPC_CONTINUATION_PACKET = 1;</code>
       *
       * <pre>
       * not implemented yet
       * </pre>
       */
      public static final int RPC_CONTINUATION_PACKET_VALUE = 1;
      /**
       * <code>RPC_CLOSE_CONNECTION = 2;</code>
       *
       * <pre>
       * close the rpc connection
       * </pre>
       */
      public static final int RPC_CLOSE_CONNECTION_VALUE = 2;


      /** Returns the proto wire value of this enum constant. */
      public final int getNumber() { return value; }

      /** Maps a wire value to its constant; returns null if unrecognized. */
      public static OperationProto valueOf(int value) {
        switch (value) {
          case 0: return RPC_FINAL_PACKET;
          case 1: return RPC_CONTINUATION_PACKET;
          case 2: return RPC_CLOSE_CONNECTION;
          default: return null;
        }
      }

      public static com.google.protobuf.Internal.EnumLiteMap<OperationProto>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<OperationProto>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<OperationProto>() {
              public OperationProto findValueByNumber(int number) {
                return OperationProto.valueOf(number);
              }
            };

      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDescriptor().getEnumTypes().get(0);
      }

      private static final OperationProto[] VALUES = values();

      /** Reverse-maps an EnumValueDescriptor to its constant by index. */
      public static OperationProto valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      // Descriptor position and proto wire value, respectively; identical
      // here but distinct in general.
      private final int index;
      private final int value;

      private OperationProto(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcRequestHeaderProto.OperationProto)
    }
1759
    // Presence bitmap: bit 0x01=rpcKind, 0x02=rpcOp, 0x04=callId,
    // 0x08=clientId, 0x10=retryCount, 0x20=traceInfo, 0x40=callerContext.
    private int bitField0_;
    // optional .hadoop.common.RpcKindProto rpcKind = 1;
    public static final int RPCKIND_FIELD_NUMBER = 1;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_;
    /**
     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
     */
    public boolean hasRpcKind() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
      return rpcKind_;
    }

    // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
    public static final int RPCOP_FIELD_NUMBER = 2;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_;
    /**
     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
     */
    public boolean hasRpcOp() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
      return rpcOp_;
    }

    // required sint32 callId = 3;
    public static final int CALLID_FIELD_NUMBER = 3;
    private int callId_;
    /**
     * <code>required sint32 callId = 3;</code>
     *
     * <pre>
     * a sequence number that is sent back in response
     * </pre>
     */
    public boolean hasCallId() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required sint32 callId = 3;</code>
     *
     * <pre>
     * a sequence number that is sent back in response
     * </pre>
     */
    public int getCallId() {
      return callId_;
    }

    // required bytes clientId = 4;
    public static final int CLIENTID_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString clientId_;
    /**
     * <code>required bytes clientId = 4;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    public boolean hasClientId() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required bytes clientId = 4;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    public com.google.protobuf.ByteString getClientId() {
      return clientId_;
    }

    // optional sint32 retryCount = 5 [default = -1];
    public static final int RETRYCOUNT_FIELD_NUMBER = 5;
    private int retryCount_;
    /**
     * <code>optional sint32 retryCount = 5 [default = -1];</code>
     *
     * <pre>
     * clientId + callId uniquely identifies a request
     * retry count, 1 means this is the first retry
     * </pre>
     */
    public boolean hasRetryCount() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional sint32 retryCount = 5 [default = -1];</code>
     *
     * <pre>
     * clientId + callId uniquely identifies a request
     * retry count, 1 means this is the first retry
     * </pre>
     */
    public int getRetryCount() {
      return retryCount_;
    }

    // optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
    public static final int TRACEINFO_FIELD_NUMBER = 6;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_;
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public boolean hasTraceInfo() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
      return traceInfo_;
    }
    /**
     * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
     *
     * <pre>
     * tracing info
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
      return traceInfo_;
    }

    // optional .hadoop.common.RPCCallerContextProto callerContext = 7;
    public static final int CALLERCONTEXT_FIELD_NUMBER = 7;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto callerContext_;
    /**
     * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
     *
     * <pre>
     * call context
     * </pre>
     */
    public boolean hasCallerContext() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
     *
     * <pre>
     * call context
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext() {
      return callerContext_;
    }
    /**
     * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
     *
     * <pre>
     * call context
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder() {
      return callerContext_;
    }
1934
    // Resets every field to its proto-declared default
    // (note retryCount defaults to -1, not 0).
    private void initFields() {
      rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
      rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
      callId_ = 0;
      clientId_ = com.google.protobuf.ByteString.EMPTY;
      retryCount_ = -1;
      traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
      callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
    }
    // Memoized tri-state: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // A message is initialized when both required fields (callId, clientId)
    // are set and any present callerContext is itself initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasCallId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasClientId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasCallerContext()) {
        if (!getCallerContext().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
1966
    // Serializes only the fields whose presence bit is set, in field-number
    // order, then any unknown fields. getSerializedSize() is called first so
    // memoizedSerializedSize is populated before writing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, rpcKind_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, rpcOp_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeSInt32(3, callId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, clientId_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeSInt32(5, retryCount_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeMessage(6, traceInfo_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeMessage(7, callerContext_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes (once) the encoded byte size; must mirror writeTo exactly.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, rpcKind_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, rpcOp_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeSInt32Size(3, callId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, clientId_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeSInt32Size(5, retryCount_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, traceInfo_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(7, callerContext_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2032
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the superclass
    // (GeneratedMessage) replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2039
    // Field-by-field equality: for each field, presence must match and, when
    // present, values must match; unknown fields are compared last.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) obj;

      boolean result = true;
      result = result && (hasRpcKind() == other.hasRpcKind());
      if (hasRpcKind()) {
        result = result &&
            (getRpcKind() == other.getRpcKind());
      }
      result = result && (hasRpcOp() == other.hasRpcOp());
      if (hasRpcOp()) {
        result = result &&
            (getRpcOp() == other.getRpcOp());
      }
      result = result && (hasCallId() == other.hasCallId());
      if (hasCallId()) {
        result = result && (getCallId()
            == other.getCallId());
      }
      result = result && (hasClientId() == other.hasClientId());
      if (hasClientId()) {
        result = result && getClientId()
            .equals(other.getClientId());
      }
      result = result && (hasRetryCount() == other.hasRetryCount());
      if (hasRetryCount()) {
        result = result && (getRetryCount()
            == other.getRetryCount());
      }
      result = result && (hasTraceInfo() == other.hasTraceInfo());
      if (hasTraceInfo()) {
        result = result && getTraceInfo()
            .equals(other.getTraceInfo());
      }
      result = result && (hasCallerContext() == other.hasCallerContext());
      if (hasCallerContext()) {
        result = result && getCallerContext()
            .equals(other.getCallerContext());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means "not yet computed" (a legitimately-zero hash
    // is simply recomputed each call — standard generated-code trade-off).
    private int memoizedHashCode = 0;
    // Paired with equals above: mixes in field number + value for each
    // present field. hashEnum is inherited from GeneratedMessage.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRpcKind()) {
        hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getRpcKind());
      }
      if (hasRpcOp()) {
        hash = (37 * hash) + RPCOP_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getRpcOp());
      }
      if (hasCallId()) {
        hash = (37 * hash) + CALLID_FIELD_NUMBER;
        hash = (53 * hash) + getCallId();
      }
      if (hasClientId()) {
        hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
        hash = (53 * hash) + getClientId().hashCode();
      }
      if (hasRetryCount()) {
        hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
        hash = (53 * hash) + getRetryCount();
      }
      if (hasTraceInfo()) {
        hash = (37 * hash) + TRACEINFO_FIELD_NUMBER;
        hash = (53 * hash) + getTraceInfo().hashCode();
      }
      if (hasCallerContext()) {
        hash = (37 * hash) + CALLERCONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getCallerContext().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2131
    // Static parse entry points — all delegate to the shared PARSER
    // (declared elsewhere in this class). The *Delimited* variants expect a
    // varint length prefix before the message bytes.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories: fresh builder, builder pre-populated from a
    // prototype, and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2198    /**
2199     * Protobuf type {@code hadoop.common.RpcRequestHeaderProto}
2200     *
2201     * <pre>
2202     * the header for the RpcRequest
2203     * </pre>
2204     */
2205    public static final class Builder extends
2206        com.google.protobuf.GeneratedMessage.Builder<Builder>
2207       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProtoOrBuilder {
      // Message descriptor and reflection field-accessor table for
      // RpcRequestHeaderProto (both declared at file scope).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builders when the runtime
      // requires field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTraceInfoFieldBuilder();
          getCallerContextFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears its presence bit;
      // nested-message fields clear through their sub-builder when one exists.
      public Builder clear() {
        super.clear();
        rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
        bitField0_ = (bitField0_ & ~0x00000002);
        callId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        clientId_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        retryCount_ = -1;
        bitField0_ = (bitField0_ & ~0x00000010);
        if (traceInfoBuilder_ == null) {
          traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
        } else {
          traceInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        if (callerContextBuilder_ == null) {
          callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
        } else {
          callerContextBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      // Deep copy via a partial build + merge.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance();
      }
2279
      // Builds and verifies required fields are set; throws
      // UninitializedMessageException otherwise.
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto build() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the required-field check, copying each value and
      // translating the builder's presence bits into the message's bits.
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.rpcKind_ = rpcKind_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.rpcOp_ = rpcOp_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.callId_ = callId_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.clientId_ = clientId_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.retryCount_ = retryCount_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        // Nested-message fields come from the sub-builder when one was
        // created, otherwise from the plain field.
        if (traceInfoBuilder_ == null) {
          result.traceInfo_ = traceInfo_;
        } else {
          result.traceInfo_ = traceInfoBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        if (callerContextBuilder_ == null) {
          result.callerContext_ = callerContext_;
        } else {
          result.callerContext_ = callerContextBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Type-dispatching merge required by the Message.Builder interface.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies each field that is present in `other` into this builder;
      // scalar/enum fields overwrite, nested messages merge recursively.
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.getDefaultInstance()) return this;
        if (other.hasRpcKind()) {
          setRpcKind(other.getRpcKind());
        }
        if (other.hasRpcOp()) {
          setRpcOp(other.getRpcOp());
        }
        if (other.hasCallId()) {
          setCallId(other.getCallId());
        }
        if (other.hasClientId()) {
          setClientId(other.getClientId());
        }
        if (other.hasRetryCount()) {
          setRetryCount(other.getRetryCount());
        }
        if (other.hasTraceInfo()) {
          mergeTraceInfo(other.getTraceInfo());
        }
        if (other.hasCallerContext()) {
          mergeCallerContext(other.getCallerContext());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Same required-field rules as the message's isInitialized(), but
      // evaluated against the builder's state and not memoized.
      public final boolean isInitialized() {
        if (!hasCallId()) {
          
          return false;
        }
        if (!hasClientId()) {
          
          return false;
        }
        if (hasCallerContext()) {
          if (!getCallerContext().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      // Stream merge: on a parse failure the partially-parsed message (if
      // any) is still merged in the finally block before rethrowing, so no
      // successfully-read fields are lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bits; same bit assignments as the message.
      private int bitField0_;

      // optional .hadoop.common.RpcKindProto rpcKind = 1;
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
      /**
       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
       */
      public boolean hasRpcKind() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto getRpcKind() {
        return rpcKind_;
      }
      /**
       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
       */
      public Builder setRpcKind(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        rpcKind_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.RpcKindProto rpcKind = 1;</code>
       */
      public Builder clearRpcKind() {
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto.RPC_BUILTIN;
        onChanged();
        return this;
      }

      // optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
      /**
       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
       */
      public boolean hasRpcOp() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto getRpcOp() {
        return rpcOp_;
      }
      /**
       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
       */
      public Builder setRpcOp(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        rpcOp_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.RpcRequestHeaderProto.OperationProto rpcOp = 2;</code>
       */
      public Builder clearRpcOp() {
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET;
        onChanged();
        return this;
      }

      // required sint32 callId = 3;
      private int callId_ ;
      /**
       * <code>required sint32 callId = 3;</code>
       *
       * <pre>
       * a sequence number that is sent back in response
       * </pre>
       */
      public boolean hasCallId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required sint32 callId = 3;</code>
       *
       * <pre>
       * a sequence number that is sent back in response
       * </pre>
       */
      public int getCallId() {
        return callId_;
      }
      /**
       * <code>required sint32 callId = 3;</code>
       *
       * <pre>
       * a sequence number that is sent back in response
       * </pre>
       */
      public Builder setCallId(int value) {
        bitField0_ |= 0x00000004;
        callId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required sint32 callId = 3;</code>
       *
       * <pre>
       * a sequence number that is sent back in response
       * </pre>
       */
      public Builder clearCallId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        callId_ = 0;
        onChanged();
        return this;
      }

      // required bytes clientId = 4;
      private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes clientId = 4;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public boolean hasClientId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>required bytes clientId = 4;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public com.google.protobuf.ByteString getClientId() {
        return clientId_;
      }
      /**
       * <code>required bytes clientId = 4;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      // NOTE: the odd indentation below is exactly what protoc emitted;
      // left untouched since this file must match generator output.
      public Builder setClientId(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        clientId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes clientId = 4;</code>
       *
       * <pre>
       * Globally unique client ID
       * </pre>
       */
      public Builder clearClientId() {
        bitField0_ = (bitField0_ & ~0x00000008);
        clientId_ = getDefaultInstance().getClientId();
        onChanged();
        return this;
      }
2578
      // optional sint32 retryCount = 5 [default = -1];
      // Presence is tracked by bit 0x00000010 of bitField0_; proto default is -1.
      private int retryCount_ = -1;
      /**
       * <code>optional sint32 retryCount = 5 [default = -1];</code>
       *
       * <pre>
       * clientId + callId uniquely identifies a request
       * retry count, 1 means this is the first retry
       * </pre>
       */
      public boolean hasRetryCount() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional sint32 retryCount = 5 [default = -1];</code>
       *
       * <pre>
       * clientId + callId uniquely identifies a request
       * retry count, 1 means this is the first retry
       * </pre>
       */
      public int getRetryCount() {
        return retryCount_;
      }
      /**
       * <code>optional sint32 retryCount = 5 [default = -1];</code>
       *
       * <pre>
       * clientId + callId uniquely identifies a request
       * retry count, 1 means this is the first retry
       * </pre>
       */
      public Builder setRetryCount(int value) {
        bitField0_ |= 0x00000010;
        retryCount_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional sint32 retryCount = 5 [default = -1];</code>
       *
       * <pre>
       * clientId + callId uniquely identifies a request
       * retry count, 1 means this is the first retry
       * </pre>
       */
      public Builder clearRetryCount() {
        // Reset to the declared field default of -1, not 0.
        bitField0_ = (bitField0_ & ~0x00000010);
        retryCount_ = -1;
        onChanged();
        return this;
      }
2631
      // optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;
      // traceInfo_ holds the message only while traceInfoBuilder_ is null; once the
      // lazy SingleFieldBuilder is created it becomes the single source of truth.
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder> traceInfoBuilder_;
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public boolean hasTraceInfo() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto getTraceInfo() {
        if (traceInfoBuilder_ == null) {
          return traceInfo_;
        } else {
          return traceInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public Builder setTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
        if (traceInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          traceInfo_ = value;
          onChanged();
        } else {
          traceInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public Builder setTraceInfo(
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder builderForValue) {
        if (traceInfoBuilder_ == null) {
          traceInfo_ = builderForValue.build();
          onChanged();
        } else {
          traceInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public Builder mergeTraceInfo(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto value) {
        if (traceInfoBuilder_ == null) {
          // If the field is already set to a non-default message, merge field-by-field
          // via a fresh builder; otherwise simply adopt the incoming value.
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              traceInfo_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance()) {
            traceInfo_ =
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.newBuilder(traceInfo_).mergeFrom(value).buildPartial();
          } else {
            traceInfo_ = value;
          }
          onChanged();
        } else {
          traceInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public Builder clearTraceInfo() {
        if (traceInfoBuilder_ == null) {
          traceInfo_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.getDefaultInstance();
          onChanged();
        } else {
          traceInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder getTraceInfoBuilder() {
        // Handing out a builder marks the field present, since the caller may mutate it.
        bitField0_ |= 0x00000020;
        onChanged();
        return getTraceInfoFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder getTraceInfoOrBuilder() {
        if (traceInfoBuilder_ != null) {
          return traceInfoBuilder_.getMessageOrBuilder();
        } else {
          return traceInfo_;
        }
      }
      /**
       * <code>optional .hadoop.common.RPCTraceInfoProto traceInfo = 6;</code>
       *
       * <pre>
       * tracing info
       * </pre>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder> 
          getTraceInfoFieldBuilder() {
        if (traceInfoBuilder_ == null) {
          // Lazily create the builder, transferring ownership of traceInfo_ to it.
          traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProtoOrBuilder>(
                  traceInfo_,
                  getParentForChildren(),
                  isClean());
          traceInfo_ = null;
        }
        return traceInfoBuilder_;
      }
2784
      // optional .hadoop.common.RPCCallerContextProto callerContext = 7;
      // callerContext_ holds the message only while callerContextBuilder_ is null;
      // once the lazy SingleFieldBuilder is created it becomes the source of truth.
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder> callerContextBuilder_;
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public boolean hasCallerContext() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto getCallerContext() {
        if (callerContextBuilder_ == null) {
          return callerContext_;
        } else {
          return callerContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public Builder setCallerContext(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto value) {
        if (callerContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          callerContext_ = value;
          onChanged();
        } else {
          callerContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public Builder setCallerContext(
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder builderForValue) {
        if (callerContextBuilder_ == null) {
          callerContext_ = builderForValue.build();
          onChanged();
        } else {
          callerContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public Builder mergeCallerContext(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto value) {
        if (callerContextBuilder_ == null) {
          // If the field already holds a non-default message, merge field-by-field;
          // otherwise adopt the incoming value wholesale.
          if (((bitField0_ & 0x00000040) == 0x00000040) &&
              callerContext_ != org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance()) {
            callerContext_ =
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.newBuilder(callerContext_).mergeFrom(value).buildPartial();
          } else {
            callerContext_ = value;
          }
          onChanged();
        } else {
          callerContextBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public Builder clearCallerContext() {
        if (callerContextBuilder_ == null) {
          callerContext_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.getDefaultInstance();
          onChanged();
        } else {
          callerContextBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder getCallerContextBuilder() {
        // Handing out a builder marks the field present, since the caller may mutate it.
        bitField0_ |= 0x00000040;
        onChanged();
        return getCallerContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder getCallerContextOrBuilder() {
        if (callerContextBuilder_ != null) {
          return callerContextBuilder_.getMessageOrBuilder();
        } else {
          return callerContext_;
        }
      }
      /**
       * <code>optional .hadoop.common.RPCCallerContextProto callerContext = 7;</code>
       *
       * <pre>
       * call context
       * </pre>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder> 
          getCallerContextFieldBuilder() {
        if (callerContextBuilder_ == null) {
          // Lazily create the builder, transferring ownership of callerContext_ to it.
          callerContextBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProto.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCCallerContextProtoOrBuilder>(
                  callerContext_,
                  getParentForChildren(),
                  isClean());
          callerContext_ = null;
        }
        return callerContextBuilder_;
      }
2937
2938      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcRequestHeaderProto)
2939    }
2940
    static {
      // Eagerly build the shared immutable singleton returned by getDefaultInstance().
      defaultInstance = new RpcRequestHeaderProto(true);
      defaultInstance.initFields();
    }
2945
2946    // @@protoc_insertion_point(class_scope:hadoop.common.RpcRequestHeaderProto)
2947  }
2948
  /**
   * Read-only accessor interface for {@code hadoop.common.RpcResponseHeaderProto},
   * implemented by both the immutable message and its Builder.
   */
  public interface RpcResponseHeaderProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint32 callId = 1;
    /**
     * <code>required uint32 callId = 1;</code>
     *
     * <pre>
     * callId used in Request
     * </pre>
     */
    boolean hasCallId();
    /**
     * <code>required uint32 callId = 1;</code>
     *
     * <pre>
     * callId used in Request
     * </pre>
     */
    int getCallId();

    // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
    /**
     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
     */
    boolean hasStatus();
    /**
     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus();

    // optional uint32 serverIpcVersionNum = 3;
    /**
     * <code>optional uint32 serverIpcVersionNum = 3;</code>
     *
     * <pre>
     * Sent if success or fail
     * </pre>
     */
    boolean hasServerIpcVersionNum();
    /**
     * <code>optional uint32 serverIpcVersionNum = 3;</code>
     *
     * <pre>
     * Sent if success or fail
     * </pre>
     */
    int getServerIpcVersionNum();

    // optional string exceptionClassName = 4;
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    boolean hasExceptionClassName();
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    java.lang.String getExceptionClassName();
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    com.google.protobuf.ByteString
        getExceptionClassNameBytes();

    // optional string errorMsg = 5;
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    boolean hasErrorMsg();
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    java.lang.String getErrorMsg();
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    com.google.protobuf.ByteString
        getErrorMsgBytes();

    // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
    /**
     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
     *
     * <pre>
     * in case of error
     * </pre>
     */
    boolean hasErrorDetail();
    /**
     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
     *
     * <pre>
     * in case of error
     * </pre>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail();

    // optional bytes clientId = 7;
    /**
     * <code>optional bytes clientId = 7;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    boolean hasClientId();
    /**
     * <code>optional bytes clientId = 7;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    com.google.protobuf.ByteString getClientId();

    // optional sint32 retryCount = 8 [default = -1];
    /**
     * <code>optional sint32 retryCount = 8 [default = -1];</code>
     */
    boolean hasRetryCount();
    /**
     * <code>optional sint32 retryCount = 8 [default = -1];</code>
     */
    int getRetryCount();
  }
3098  /**
3099   * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
3100   *
3101   * <pre>
3102   **
3103   * Rpc Response Header
3104   * +------------------------------------------------------------------+
3105   * | Rpc total response length in bytes (4 bytes int)                 |
3106   * |  (sum of next two parts)                                         |
3107   * +------------------------------------------------------------------+
3108   * | RpcResponseHeaderProto - serialized delimited ie has len         |
3109   * +------------------------------------------------------------------+
3110   * | if request is successful:                                        |
3111   * |   - RpcResponse -  The actual rpc response  bytes follow         |
3112   * |     the response header                                          |
3113   * |     This response is serialized based on RpcKindProto            |
3114   * | if request fails :                                               |
3115   * |   The rpc response header contains the necessary info            |
3116   * +------------------------------------------------------------------+
3117   *
3118   * Note that rpc response header is also used when connection setup fails. 
3119   * Ie the response looks like a rpc response with a fake callId.
3120   * </pre>
3121   */
3122  public static final class RpcResponseHeaderProto extends
3123      com.google.protobuf.GeneratedMessage
3124      implements RpcResponseHeaderProtoOrBuilder {
    // Use RpcResponseHeaderProto.newBuilder() to construct.
    private RpcResponseHeaderProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit is a marker parameter; this ctor is used only for the default-instance singleton.
    private RpcResponseHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3131
    // Singleton default instance; assigned once in the static initializer.
    private static final RpcResponseHeaderProto defaultInstance;
    public static RpcResponseHeaderProto getDefaultInstance() {
      return defaultInstance;
    }

    public RpcResponseHeaderProto getDefaultInstanceForType() {
      return defaultInstance;
    }
3140
    // Fields that were on the wire but not in this message's schema; preserved for reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-parsing constructor: reads tag/value pairs from {@code input} until
     * end-of-stream (tag 0), setting the matching has-bit for each recognized field.
     * Unrecognized tags and out-of-range enum values are preserved in unknownFields.
     * Each case label is the full tag value: (field_number << 3) | wire_type.
     */
    private RpcResponseHeaderProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // NOTE: protoc emits the default label before the numbered cases;
            // case order has no effect on switch dispatch.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1 (callId), varint
              bitField0_ |= 0x00000001;
              callId_ = input.readUInt32();
              break;
            }
            case 16: {
              // field 2 (status), varint enum; unknown values go to unknownFields
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                status_ = value;
              }
              break;
            }
            case 24: {
              // field 3 (serverIpcVersionNum), varint
              bitField0_ |= 0x00000004;
              serverIpcVersionNum_ = input.readUInt32();
              break;
            }
            case 34: {
              // field 4 (exceptionClassName), length-delimited
              bitField0_ |= 0x00000008;
              exceptionClassName_ = input.readBytes();
              break;
            }
            case 42: {
              // field 5 (errorMsg), length-delimited
              bitField0_ |= 0x00000010;
              errorMsg_ = input.readBytes();
              break;
            }
            case 48: {
              // field 6 (errorDetail), varint enum; unknown values go to unknownFields
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(6, rawValue);
              } else {
                bitField0_ |= 0x00000020;
                errorDetail_ = value;
              }
              break;
            }
            case 58: {
              // field 7 (clientId), length-delimited
              bitField0_ |= 0x00000040;
              clientId_ = input.readBytes();
              break;
            }
            case 64: {
              // field 8 (retryCount), zig-zag varint
              bitField0_ |= 0x00000080;
              retryCount_ = input.readSInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields and extensions, even on parse failure,
        // so the partially-parsed message attached to the exception is immutable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, resolved from the file-level descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
    }

    // Maps descriptor fields to the generated accessors via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
    }
3245
    // Stateless parser delegating to the wire-parsing constructor.
    // (protoc 2.5 emits this as a non-final public field; kept as generated.)
    public static com.google.protobuf.Parser<RpcResponseHeaderProto> PARSER =
        new com.google.protobuf.AbstractParser<RpcResponseHeaderProto>() {
      public RpcResponseHeaderProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RpcResponseHeaderProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RpcResponseHeaderProto> getParserForType() {
      return PARSER;
    }
3260
3261    /**
3262     * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcStatusProto}
3263     */
3264    public enum RpcStatusProto
3265        implements com.google.protobuf.ProtocolMessageEnum {
3266      /**
3267       * <code>SUCCESS = 0;</code>
3268       *
3269       * <pre>
3270       * RPC succeeded
3271       * </pre>
3272       */
3273      SUCCESS(0, 0),
3274      /**
3275       * <code>ERROR = 1;</code>
3276       *
3277       * <pre>
3278       * RPC or error - connection left open for future calls
3279       * </pre>
3280       */
3281      ERROR(1, 1),
3282      /**
3283       * <code>FATAL = 2;</code>
3284       *
3285       * <pre>
3286       * Fatal error - connection closed
3287       * </pre>
3288       */
3289      FATAL(2, 2),
3290      ;
3291
3292      /**
3293       * <code>SUCCESS = 0;</code>
3294       *
3295       * <pre>
3296       * RPC succeeded
3297       * </pre>
3298       */
3299      public static final int SUCCESS_VALUE = 0;
3300      /**
3301       * <code>ERROR = 1;</code>
3302       *
3303       * <pre>
3304       * RPC or error - connection left open for future calls
3305       * </pre>
3306       */
3307      public static final int ERROR_VALUE = 1;
3308      /**
3309       * <code>FATAL = 2;</code>
3310       *
3311       * <pre>
3312       * Fatal error - connection closed
3313       * </pre>
3314       */
3315      public static final int FATAL_VALUE = 2;
3316
3317
3318      public final int getNumber() { return value; }
3319
3320      public static RpcStatusProto valueOf(int value) {
3321        switch (value) {
3322          case 0: return SUCCESS;
3323          case 1: return ERROR;
3324          case 2: return FATAL;
3325          default: return null;
3326        }
3327      }
3328
3329      public static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
3330          internalGetValueMap() {
3331        return internalValueMap;
3332      }
3333      private static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
3334          internalValueMap =
3335            new com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>() {
3336              public RpcStatusProto findValueByNumber(int number) {
3337                return RpcStatusProto.valueOf(number);
3338              }
3339            };
3340
3341      public final com.google.protobuf.Descriptors.EnumValueDescriptor
3342          getValueDescriptor() {
3343        return getDescriptor().getValues().get(index);
3344      }
3345      public final com.google.protobuf.Descriptors.EnumDescriptor
3346          getDescriptorForType() {
3347        return getDescriptor();
3348      }
3349      public static final com.google.protobuf.Descriptors.EnumDescriptor
3350          getDescriptor() {
3351        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(0);
3352      }
3353
3354      private static final RpcStatusProto[] VALUES = values();
3355
3356      public static RpcStatusProto valueOf(
3357          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
3358        if (desc.getType() != getDescriptor()) {
3359          throw new java.lang.IllegalArgumentException(
3360            "EnumValueDescriptor is not for this type.");
3361        }
3362        return VALUES[desc.getIndex()];
3363      }
3364
3365      private final int index;
3366      private final int value;
3367
3368      private RpcStatusProto(int index, int value) {
3369        this.index = index;
3370        this.value = value;
3371      }
3372
3373      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcStatusProto)
3374    }
3375
3376    /**
3377     * Protobuf enum {@code hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto}
3378     */
3379    public enum RpcErrorCodeProto
3380        implements com.google.protobuf.ProtocolMessageEnum {
3381      /**
3382       * <code>ERROR_APPLICATION = 1;</code>
3383       *
3384       * <pre>
3385       * Non-fatal Rpc error - connection left open for future rpc calls
3386       * </pre>
3387       */
3388      ERROR_APPLICATION(0, 1),
3389      /**
3390       * <code>ERROR_NO_SUCH_METHOD = 2;</code>
3391       *
3392       * <pre>
3393       * Rpc error - no such method
3394       * </pre>
3395       */
3396      ERROR_NO_SUCH_METHOD(1, 2),
3397      /**
3398       * <code>ERROR_NO_SUCH_PROTOCOL = 3;</code>
3399       *
3400       * <pre>
3401       * Rpc error - no such protocol
3402       * </pre>
3403       */
3404      ERROR_NO_SUCH_PROTOCOL(2, 3),
3405      /**
3406       * <code>ERROR_RPC_SERVER = 4;</code>
3407       *
3408       * <pre>
3409       * Rpc error on server side
3410       * </pre>
3411       */
3412      ERROR_RPC_SERVER(3, 4),
3413      /**
3414       * <code>ERROR_SERIALIZING_RESPONSE = 5;</code>
3415       *
3416       * <pre>
3417       * error serializign response
3418       * </pre>
3419       */
3420      ERROR_SERIALIZING_RESPONSE(4, 5),
3421      /**
3422       * <code>ERROR_RPC_VERSION_MISMATCH = 6;</code>
3423       *
3424       * <pre>
3425       * Rpc protocol version mismatch
3426       * </pre>
3427       */
3428      ERROR_RPC_VERSION_MISMATCH(5, 6),
3429      /**
3430       * <code>FATAL_UNKNOWN = 10;</code>
3431       *
3432       * <pre>
3433       * Fatal Server side Rpc error - connection closed
3434       * </pre>
3435       */
3436      FATAL_UNKNOWN(6, 10),
3437      /**
3438       * <code>FATAL_UNSUPPORTED_SERIALIZATION = 11;</code>
3439       *
3440       * <pre>
3441       * IPC layer serilization type invalid
3442       * </pre>
3443       */
3444      FATAL_UNSUPPORTED_SERIALIZATION(7, 11),
3445      /**
3446       * <code>FATAL_INVALID_RPC_HEADER = 12;</code>
3447       *
3448       * <pre>
3449       * fields of RpcHeader are invalid
3450       * </pre>
3451       */
3452      FATAL_INVALID_RPC_HEADER(8, 12),
3453      /**
3454       * <code>FATAL_DESERIALIZING_REQUEST = 13;</code>
3455       *
3456       * <pre>
3457       * could not deserilize rpc request
3458       * </pre>
3459       */
3460      FATAL_DESERIALIZING_REQUEST(9, 13),
3461      /**
3462       * <code>FATAL_VERSION_MISMATCH = 14;</code>
3463       *
3464       * <pre>
3465       * Ipc Layer version mismatch
3466       * </pre>
3467       */
3468      FATAL_VERSION_MISMATCH(10, 14),
3469      /**
3470       * <code>FATAL_UNAUTHORIZED = 15;</code>
3471       *
3472       * <pre>
3473       * Auth failed
3474       * </pre>
3475       */
3476      FATAL_UNAUTHORIZED(11, 15),
3477      ;
3478
3479      /**
3480       * <code>ERROR_APPLICATION = 1;</code>
3481       *
3482       * <pre>
3483       * Non-fatal Rpc error - connection left open for future rpc calls
3484       * </pre>
3485       */
3486      public static final int ERROR_APPLICATION_VALUE = 1;
3487      /**
3488       * <code>ERROR_NO_SUCH_METHOD = 2;</code>
3489       *
3490       * <pre>
3491       * Rpc error - no such method
3492       * </pre>
3493       */
3494      public static final int ERROR_NO_SUCH_METHOD_VALUE = 2;
3495      /**
3496       * <code>ERROR_NO_SUCH_PROTOCOL = 3;</code>
3497       *
3498       * <pre>
3499       * Rpc error - no such protocol
3500       * </pre>
3501       */
3502      public static final int ERROR_NO_SUCH_PROTOCOL_VALUE = 3;
3503      /**
3504       * <code>ERROR_RPC_SERVER = 4;</code>
3505       *
3506       * <pre>
3507       * Rpc error on server side
3508       * </pre>
3509       */
3510      public static final int ERROR_RPC_SERVER_VALUE = 4;
3511      /**
3512       * <code>ERROR_SERIALIZING_RESPONSE = 5;</code>
3513       *
3514       * <pre>
3515       * error serializign response
3516       * </pre>
3517       */
3518      public static final int ERROR_SERIALIZING_RESPONSE_VALUE = 5;
3519      /**
3520       * <code>ERROR_RPC_VERSION_MISMATCH = 6;</code>
3521       *
3522       * <pre>
3523       * Rpc protocol version mismatch
3524       * </pre>
3525       */
3526      public static final int ERROR_RPC_VERSION_MISMATCH_VALUE = 6;
3527      /**
3528       * <code>FATAL_UNKNOWN = 10;</code>
3529       *
3530       * <pre>
3531       * Fatal Server side Rpc error - connection closed
3532       * </pre>
3533       */
3534      public static final int FATAL_UNKNOWN_VALUE = 10;
3535      /**
3536       * <code>FATAL_UNSUPPORTED_SERIALIZATION = 11;</code>
3537       *
3538       * <pre>
3539       * IPC layer serilization type invalid
3540       * </pre>
3541       */
3542      public static final int FATAL_UNSUPPORTED_SERIALIZATION_VALUE = 11;
3543      /**
3544       * <code>FATAL_INVALID_RPC_HEADER = 12;</code>
3545       *
3546       * <pre>
3547       * fields of RpcHeader are invalid
3548       * </pre>
3549       */
3550      public static final int FATAL_INVALID_RPC_HEADER_VALUE = 12;
3551      /**
3552       * <code>FATAL_DESERIALIZING_REQUEST = 13;</code>
3553       *
3554       * <pre>
3555       * could not deserilize rpc request
3556       * </pre>
3557       */
3558      public static final int FATAL_DESERIALIZING_REQUEST_VALUE = 13;
3559      /**
3560       * <code>FATAL_VERSION_MISMATCH = 14;</code>
3561       *
3562       * <pre>
3563       * Ipc Layer version mismatch
3564       * </pre>
3565       */
3566      public static final int FATAL_VERSION_MISMATCH_VALUE = 14;
3567      /**
3568       * <code>FATAL_UNAUTHORIZED = 15;</code>
3569       *
3570       * <pre>
3571       * Auth failed
3572       * </pre>
3573       */
3574      public static final int FATAL_UNAUTHORIZED_VALUE = 15;
3575
3576
3577      public final int getNumber() { return value; }
3578
3579      public static RpcErrorCodeProto valueOf(int value) {
3580        switch (value) {
3581          case 1: return ERROR_APPLICATION;
3582          case 2: return ERROR_NO_SUCH_METHOD;
3583          case 3: return ERROR_NO_SUCH_PROTOCOL;
3584          case 4: return ERROR_RPC_SERVER;
3585          case 5: return ERROR_SERIALIZING_RESPONSE;
3586          case 6: return ERROR_RPC_VERSION_MISMATCH;
3587          case 10: return FATAL_UNKNOWN;
3588          case 11: return FATAL_UNSUPPORTED_SERIALIZATION;
3589          case 12: return FATAL_INVALID_RPC_HEADER;
3590          case 13: return FATAL_DESERIALIZING_REQUEST;
3591          case 14: return FATAL_VERSION_MISMATCH;
3592          case 15: return FATAL_UNAUTHORIZED;
3593          default: return null;
3594        }
3595      }
3596
3597      public static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
3598          internalGetValueMap() {
3599        return internalValueMap;
3600      }
3601      private static com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>
3602          internalValueMap =
3603            new com.google.protobuf.Internal.EnumLiteMap<RpcErrorCodeProto>() {
3604              public RpcErrorCodeProto findValueByNumber(int number) {
3605                return RpcErrorCodeProto.valueOf(number);
3606              }
3607            };
3608
3609      public final com.google.protobuf.Descriptors.EnumValueDescriptor
3610          getValueDescriptor() {
3611        return getDescriptor().getValues().get(index);
3612      }
3613      public final com.google.protobuf.Descriptors.EnumDescriptor
3614          getDescriptorForType() {
3615        return getDescriptor();
3616      }
3617      public static final com.google.protobuf.Descriptors.EnumDescriptor
3618          getDescriptor() {
3619        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDescriptor().getEnumTypes().get(1);
3620      }
3621
3622      private static final RpcErrorCodeProto[] VALUES = values();
3623
3624      public static RpcErrorCodeProto valueOf(
3625          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
3626        if (desc.getType() != getDescriptor()) {
3627          throw new java.lang.IllegalArgumentException(
3628            "EnumValueDescriptor is not for this type.");
3629        }
3630        return VALUES[desc.getIndex()];
3631      }
3632
3633      private final int index;
3634      private final int value;
3635
3636      private RpcErrorCodeProto(int index, int value) {
3637        this.index = index;
3638        this.value = value;
3639      }
3640
3641      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto)
3642    }
3643
    // Bitmask recording which fields were explicitly set during parsing/building
    // (one bit per field in declaration order); backs the has*() accessors.
    private int bitField0_;
    // required uint32 callId = 1;
    public static final int CALLID_FIELD_NUMBER = 1;
    private int callId_;
    /**
     * <code>required uint32 callId = 1;</code>
     *
     * <pre>
     * callId used in Request
     * </pre>
     */
    public boolean hasCallId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint32 callId = 1;</code>
     *
     * <pre>
     * callId used in Request
     * </pre>
     */
    public int getCallId() {
      return callId_;
    }
3668
    // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
    // Overall success/failure status of the call; presence tracked by bit 0x2.
    public static final int STATUS_FIELD_NUMBER = 2;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_;
    /**
     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
     */
    public boolean hasStatus() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
      return status_;
    }
3684
    // optional uint32 serverIpcVersionNum = 3;
    public static final int SERVERIPCVERSIONNUM_FIELD_NUMBER = 3;
    private int serverIpcVersionNum_;
    /**
     * <code>optional uint32 serverIpcVersionNum = 3;</code>
     *
     * <pre>
     * Sent if success or fail
     * </pre>
     */
    public boolean hasServerIpcVersionNum() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint32 serverIpcVersionNum = 3;</code>
     *
     * <pre>
     * Sent if success or fail
     * </pre>
     */
    public int getServerIpcVersionNum() {
      return serverIpcVersionNum_;
    }
3708
    // optional string exceptionClassName = 4;
    // Stored as either a decoded String or the raw ByteString off the wire;
    // the accessors below lazily convert and cache the requested form.
    public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 4;
    private java.lang.Object exceptionClassName_;
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    public boolean hasExceptionClassName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    public java.lang.String getExceptionClassName() {
      java.lang.Object ref = exceptionClassName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Field still holds wire bytes: decode, and cache the String form
        // only when the bytes are valid UTF-8.
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          exceptionClassName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string exceptionClassName = 4;</code>
     *
     * <pre>
     * if request fails
     * </pre>
     */
    public com.google.protobuf.ByteString
        getExceptionClassNameBytes() {
      java.lang.Object ref = exceptionClassName_;
      if (ref instanceof java.lang.String) {
        // Encode and cache the ByteString form for subsequent serialization.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        exceptionClassName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
3763
    // optional string errorMsg = 5;
    // Same lazy String/ByteString caching scheme as exceptionClassName.
    public static final int ERRORMSG_FIELD_NUMBER = 5;
    private java.lang.Object errorMsg_;
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    public boolean hasErrorMsg() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    public java.lang.String getErrorMsg() {
      java.lang.Object ref = errorMsg_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Decode wire bytes; cache the String only when valid UTF-8.
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          errorMsg_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string errorMsg = 5;</code>
     *
     * <pre>
     * if request fails, often contains stack trace
     * </pre>
     */
    public com.google.protobuf.ByteString
        getErrorMsgBytes() {
      java.lang.Object ref = errorMsg_;
      if (ref instanceof java.lang.String) {
        // Encode and cache the ByteString form for serialization.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        errorMsg_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
3818
    // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
    public static final int ERRORDETAIL_FIELD_NUMBER = 6;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_;
    /**
     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
     *
     * <pre>
     * in case of error
     * </pre>
     */
    public boolean hasErrorDetail() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
     *
     * <pre>
     * in case of error
     * </pre>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
      return errorDetail_;
    }
3842
    // optional bytes clientId = 7;
    public static final int CLIENTID_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString clientId_;
    /**
     * <code>optional bytes clientId = 7;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    public boolean hasClientId() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional bytes clientId = 7;</code>
     *
     * <pre>
     * Globally unique client ID
     * </pre>
     */
    public com.google.protobuf.ByteString getClientId() {
      return clientId_;
    }
3866
    // optional sint32 retryCount = 8 [default = -1];
    // Proto-declared default of -1 (set in initFields) when unset.
    public static final int RETRYCOUNT_FIELD_NUMBER = 8;
    private int retryCount_;
    /**
     * <code>optional sint32 retryCount = 8 [default = -1];</code>
     */
    public boolean hasRetryCount() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional sint32 retryCount = 8 [default = -1];</code>
     */
    public int getRetryCount() {
      return retryCount_;
    }
3882
    // Resets every field to its proto-declared default value
    // (note retryCount's explicit [default = -1]).
    private void initFields() {
      callId_ = 0;
      status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
      serverIpcVersionNum_ = 0;
      exceptionClassName_ = "";
      errorMsg_ = "";
      errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
      clientId_ = com.google.protobuf.ByteString.EMPTY;
      retryCount_ = -1;
    }
    // Memoized init state: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A message is initialized when both required fields (callId, status)
    // are present; the result is cached after the first computation.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasCallId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStatus()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
3909
    // Serializes only the fields whose presence bit is set, in field-number
    // order, then appends any unknown fields preserved from parsing.
    // getSerializedSize() is called first to populate the size cache.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, callId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, serverIpcVersionNum_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getExceptionClassNameBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getErrorMsgBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeEnum(6, errorDetail_.getNumber());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeBytes(7, clientId_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeSInt32(8, retryCount_);
      }
      getUnknownFields().writeTo(output);
    }
3939
    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of every set field plus unknown fields,
    // memoizing the result for subsequent calls and for writeTo().
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, callId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, serverIpcVersionNum_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getExceptionClassNameBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getErrorMsgBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(6, errorDetail_.getNumber());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, clientId_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeSInt32Size(8, retryCount_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
3982
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the superclass implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
3989
    // Value equality: each of the eight fields must agree on both presence
    // and content, and the retained unknown fields must match as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) obj;

      boolean result = true;
      result = result && (hasCallId() == other.hasCallId());
      if (hasCallId()) {
        result = result && (getCallId()
            == other.getCallId());
      }
      result = result && (hasStatus() == other.hasStatus());
      if (hasStatus()) {
        result = result &&
            (getStatus() == other.getStatus());
      }
      result = result && (hasServerIpcVersionNum() == other.hasServerIpcVersionNum());
      if (hasServerIpcVersionNum()) {
        result = result && (getServerIpcVersionNum()
            == other.getServerIpcVersionNum());
      }
      result = result && (hasExceptionClassName() == other.hasExceptionClassName());
      if (hasExceptionClassName()) {
        result = result && getExceptionClassName()
            .equals(other.getExceptionClassName());
      }
      result = result && (hasErrorMsg() == other.hasErrorMsg());
      if (hasErrorMsg()) {
        result = result && getErrorMsg()
            .equals(other.getErrorMsg());
      }
      result = result && (hasErrorDetail() == other.hasErrorDetail());
      if (hasErrorDetail()) {
        result = result &&
            (getErrorDetail() == other.getErrorDetail());
      }
      result = result && (hasClientId() == other.hasClientId());
      if (hasClientId()) {
        result = result && getClientId()
            .equals(other.getClientId());
      }
      result = result && (hasRetryCount() == other.hasRetryCount());
      if (hasRetryCount()) {
        result = result && (getRetryCount()
            == other.getRetryCount());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
4045
    // Cached hash; 0 means not yet computed.
    private int memoizedHashCode = 0;
    // Hash over the descriptor, each present field (mixed with its field
    // number), and the unknown fields; memoized, consistent with equals().
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCallId()) {
        hash = (37 * hash) + CALLID_FIELD_NUMBER;
        hash = (53 * hash) + getCallId();
      }
      if (hasStatus()) {
        hash = (37 * hash) + STATUS_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getStatus());
      }
      if (hasServerIpcVersionNum()) {
        hash = (37 * hash) + SERVERIPCVERSIONNUM_FIELD_NUMBER;
        hash = (53 * hash) + getServerIpcVersionNum();
      }
      if (hasExceptionClassName()) {
        hash = (37 * hash) + EXCEPTIONCLASSNAME_FIELD_NUMBER;
        hash = (53 * hash) + getExceptionClassName().hashCode();
      }
      if (hasErrorMsg()) {
        hash = (37 * hash) + ERRORMSG_FIELD_NUMBER;
        hash = (53 * hash) + getErrorMsg().hashCode();
      }
      if (hasErrorDetail()) {
        hash = (37 * hash) + ERRORDETAIL_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getErrorDetail());
      }
      if (hasClientId()) {
        hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
        hash = (53 * hash) + getClientId().hashCode();
      }
      if (hasRetryCount()) {
        hash = (37 * hash) + RETRYCOUNT_FIELD_NUMBER;
        hash = (53 * hash) + getRetryCount();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4090
    // Static parsing entry points; all delegate to the message's PARSER.
    // The *Delimited variants read a varint length prefix before the payload.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
4143
    // Builder factories: empty builder, builder pre-populated from a
    // prototype message, and a builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: creates a builder attached to a parent for change
    // notifications (used when this message is nested in another builder).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
4157    /**
4158     * Protobuf type {@code hadoop.common.RpcResponseHeaderProto}
4159     *
4160     * <pre>
4161     **
4162     * Rpc Response Header
4163     * +------------------------------------------------------------------+
4164     * | Rpc total response length in bytes (4 bytes int)                 |
4165     * |  (sum of next two parts)                                         |
4166     * +------------------------------------------------------------------+
4167     * | RpcResponseHeaderProto - serialized delimited ie has len         |
4168     * +------------------------------------------------------------------+
4169     * | if request is successful:                                        |
4170     * |   - RpcResponse -  The actual rpc response  bytes follow         |
4171     * |     the response header                                          |
4172     * |     This response is serialized based on RpcKindProto            |
4173     * | if request fails :                                               |
4174     * |   The rpc response header contains the necessary info            |
4175     * +------------------------------------------------------------------+
4176     *
     * Note that the rpc response header is also used when connection setup fails,
     * i.e. the response looks like an rpc response with a fake callId.
4179     * </pre>
4180     */
4181    public static final class Builder extends
4182        com.google.protobuf.GeneratedMessage.Builder<Builder>
4183       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProtoOrBuilder {
      // Message descriptor shared with the outer RpcResponseHeaderProto type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
      }

      // Reflection table mapping descriptor fields to the generated
      // message/builder accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Builder.class);
      }
4195
      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Builder attached to a parent builder for nested-change propagation.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields needing eager
      // field-builder initialization.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
4213
      // Resets every field to its proto-declared default (mirroring
      // initFields) and clears the corresponding presence bits.
      public Builder clear() {
        super.clear();
        callId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
        bitField0_ = (bitField0_ & ~0x00000002);
        serverIpcVersionNum_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        exceptionClassName_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        errorMsg_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
        bitField0_ = (bitField0_ & ~0x00000020);
        clientId_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        retryCount_ = -1;
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }
4234
4235      public Builder clone() {
4236        return create().mergeFrom(buildPartial());
4237      }
4238
4239      public com.google.protobuf.Descriptors.Descriptor
4240          getDescriptorForType() {
4241        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
4242      }
4243
4244      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
4245        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance();
4246      }
4247
4248      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto build() {
4249        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = buildPartial();
4250        if (!result.isInitialized()) {
4251          throw newUninitializedMessageException(result);
4252        }
4253        return result;
4254      }
4255
4256      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto buildPartial() {
4257        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto(this);
4258        int from_bitField0_ = bitField0_;
4259        int to_bitField0_ = 0;
4260        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4261          to_bitField0_ |= 0x00000001;
4262        }
4263        result.callId_ = callId_;
4264        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4265          to_bitField0_ |= 0x00000002;
4266        }
4267        result.status_ = status_;
4268        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4269          to_bitField0_ |= 0x00000004;
4270        }
4271        result.serverIpcVersionNum_ = serverIpcVersionNum_;
4272        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
4273          to_bitField0_ |= 0x00000008;
4274        }
4275        result.exceptionClassName_ = exceptionClassName_;
4276        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
4277          to_bitField0_ |= 0x00000010;
4278        }
4279        result.errorMsg_ = errorMsg_;
4280        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
4281          to_bitField0_ |= 0x00000020;
4282        }
4283        result.errorDetail_ = errorDetail_;
4284        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
4285          to_bitField0_ |= 0x00000040;
4286        }
4287        result.clientId_ = clientId_;
4288        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
4289          to_bitField0_ |= 0x00000080;
4290        }
4291        result.retryCount_ = retryCount_;
4292        result.bitField0_ = to_bitField0_;
4293        onBuilt();
4294        return result;
4295      }
4296
4297      public Builder mergeFrom(com.google.protobuf.Message other) {
4298        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) {
4299          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto)other);
4300        } else {
4301          super.mergeFrom(other);
4302          return this;
4303        }
4304      }
4305
      /**
       * Merges every field that is set on {@code other} into this builder,
       * overwriting values already present here.  Merging the default
       * instance is a no-op.
       *
       * @return this builder, for chaining
       */
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.getDefaultInstance()) return this;
        if (other.hasCallId()) {
          setCallId(other.getCallId());
        }
        if (other.hasStatus()) {
          setStatus(other.getStatus());
        }
        if (other.hasServerIpcVersionNum()) {
          setServerIpcVersionNum(other.getServerIpcVersionNum());
        }
        if (other.hasExceptionClassName()) {
          // Copy the raw field (String or ByteString) directly rather than
          // going through the setter, so an undecoded ByteString is not
          // forced through a UTF-8 decode just to be merged.
          bitField0_ |= 0x00000008;
          exceptionClassName_ = other.exceptionClassName_;
          onChanged();
        }
        if (other.hasErrorMsg()) {
          // Same raw-copy optimization as exceptionClassName above.
          bitField0_ |= 0x00000010;
          errorMsg_ = other.errorMsg_;
          onChanged();
        }
        if (other.hasErrorDetail()) {
          setErrorDetail(other.getErrorDetail());
        }
        if (other.hasClientId()) {
          setClientId(other.getClientId());
        }
        if (other.hasRetryCount()) {
          setRetryCount(other.getRetryCount());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
4339
4340      public final boolean isInitialized() {
4341        if (!hasCallId()) {
4342          
4343          return false;
4344        }
4345        if (!hasStatus()) {
4346          
4347          return false;
4348        }
4349        return true;
4350      }
4351
      /**
       * Parses a serialized RpcResponseHeaderProto from {@code input} and
       * merges it into this builder.
       *
       * @throws java.io.IOException on wire-format or stream errors; fields
       *         decoded before the failure are still merged, because the
       *         finally block merges the partial message recovered from the
       *         exception
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Capture whatever was decoded before the failure so the finally
          // block can merge it, then rethrow to the caller.
          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits, one per field in field-number order (bit 0 = callId).
      private int bitField0_;
4370
4371      // required uint32 callId = 1;
4372      private int callId_ ;
4373      /**
4374       * <code>required uint32 callId = 1;</code>
4375       *
4376       * <pre>
4377       * callId used in Request
4378       * </pre>
4379       */
4380      public boolean hasCallId() {
4381        return ((bitField0_ & 0x00000001) == 0x00000001);
4382      }
4383      /**
4384       * <code>required uint32 callId = 1;</code>
4385       *
4386       * <pre>
4387       * callId used in Request
4388       * </pre>
4389       */
4390      public int getCallId() {
4391        return callId_;
4392      }
4393      /**
4394       * <code>required uint32 callId = 1;</code>
4395       *
4396       * <pre>
4397       * callId used in Request
4398       * </pre>
4399       */
4400      public Builder setCallId(int value) {
4401        bitField0_ |= 0x00000001;
4402        callId_ = value;
4403        onChanged();
4404        return this;
4405      }
4406      /**
4407       * <code>required uint32 callId = 1;</code>
4408       *
4409       * <pre>
4410       * callId used in Request
4411       * </pre>
4412       */
4413      public Builder clearCallId() {
4414        bitField0_ = (bitField0_ & ~0x00000001);
4415        callId_ = 0;
4416        onChanged();
4417        return this;
4418      }
4419
4420      // required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;
4421      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
4422      /**
4423       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
4424       */
4425      public boolean hasStatus() {
4426        return ((bitField0_ & 0x00000002) == 0x00000002);
4427      }
4428      /**
4429       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
4430       */
4431      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto getStatus() {
4432        return status_;
4433      }
4434      /**
4435       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
4436       */
4437      public Builder setStatus(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto value) {
4438        if (value == null) {
4439          throw new NullPointerException();
4440        }
4441        bitField0_ |= 0x00000002;
4442        status_ = value;
4443        onChanged();
4444        return this;
4445      }
4446      /**
4447       * <code>required .hadoop.common.RpcResponseHeaderProto.RpcStatusProto status = 2;</code>
4448       */
4449      public Builder clearStatus() {
4450        bitField0_ = (bitField0_ & ~0x00000002);
4451        status_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.SUCCESS;
4452        onChanged();
4453        return this;
4454      }
4455
4456      // optional uint32 serverIpcVersionNum = 3;
4457      private int serverIpcVersionNum_ ;
4458      /**
4459       * <code>optional uint32 serverIpcVersionNum = 3;</code>
4460       *
4461       * <pre>
4462       * Sent if success or fail
4463       * </pre>
4464       */
4465      public boolean hasServerIpcVersionNum() {
4466        return ((bitField0_ & 0x00000004) == 0x00000004);
4467      }
4468      /**
4469       * <code>optional uint32 serverIpcVersionNum = 3;</code>
4470       *
4471       * <pre>
4472       * Sent if success or fail
4473       * </pre>
4474       */
4475      public int getServerIpcVersionNum() {
4476        return serverIpcVersionNum_;
4477      }
4478      /**
4479       * <code>optional uint32 serverIpcVersionNum = 3;</code>
4480       *
4481       * <pre>
4482       * Sent if success or fail
4483       * </pre>
4484       */
4485      public Builder setServerIpcVersionNum(int value) {
4486        bitField0_ |= 0x00000004;
4487        serverIpcVersionNum_ = value;
4488        onChanged();
4489        return this;
4490      }
4491      /**
4492       * <code>optional uint32 serverIpcVersionNum = 3;</code>
4493       *
4494       * <pre>
4495       * Sent if success or fail
4496       * </pre>
4497       */
4498      public Builder clearServerIpcVersionNum() {
4499        bitField0_ = (bitField0_ & ~0x00000004);
4500        serverIpcVersionNum_ = 0;
4501        onChanged();
4502        return this;
4503      }
4504
4505      // optional string exceptionClassName = 4;
4506      private java.lang.Object exceptionClassName_ = "";
4507      /**
4508       * <code>optional string exceptionClassName = 4;</code>
4509       *
4510       * <pre>
4511       * if request fails
4512       * </pre>
4513       */
4514      public boolean hasExceptionClassName() {
4515        return ((bitField0_ & 0x00000008) == 0x00000008);
4516      }
4517      /**
4518       * <code>optional string exceptionClassName = 4;</code>
4519       *
4520       * <pre>
4521       * if request fails
4522       * </pre>
4523       */
4524      public java.lang.String getExceptionClassName() {
4525        java.lang.Object ref = exceptionClassName_;
4526        if (!(ref instanceof java.lang.String)) {
4527          java.lang.String s = ((com.google.protobuf.ByteString) ref)
4528              .toStringUtf8();
4529          exceptionClassName_ = s;
4530          return s;
4531        } else {
4532          return (java.lang.String) ref;
4533        }
4534      }
4535      /**
4536       * <code>optional string exceptionClassName = 4;</code>
4537       *
4538       * <pre>
4539       * if request fails
4540       * </pre>
4541       */
4542      public com.google.protobuf.ByteString
4543          getExceptionClassNameBytes() {
4544        java.lang.Object ref = exceptionClassName_;
4545        if (ref instanceof String) {
4546          com.google.protobuf.ByteString b = 
4547              com.google.protobuf.ByteString.copyFromUtf8(
4548                  (java.lang.String) ref);
4549          exceptionClassName_ = b;
4550          return b;
4551        } else {
4552          return (com.google.protobuf.ByteString) ref;
4553        }
4554      }
4555      /**
4556       * <code>optional string exceptionClassName = 4;</code>
4557       *
4558       * <pre>
4559       * if request fails
4560       * </pre>
4561       */
4562      public Builder setExceptionClassName(
4563          java.lang.String value) {
4564        if (value == null) {
4565    throw new NullPointerException();
4566  }
4567  bitField0_ |= 0x00000008;
4568        exceptionClassName_ = value;
4569        onChanged();
4570        return this;
4571      }
4572      /**
4573       * <code>optional string exceptionClassName = 4;</code>
4574       *
4575       * <pre>
4576       * if request fails
4577       * </pre>
4578       */
4579      public Builder clearExceptionClassName() {
4580        bitField0_ = (bitField0_ & ~0x00000008);
4581        exceptionClassName_ = getDefaultInstance().getExceptionClassName();
4582        onChanged();
4583        return this;
4584      }
4585      /**
4586       * <code>optional string exceptionClassName = 4;</code>
4587       *
4588       * <pre>
4589       * if request fails
4590       * </pre>
4591       */
4592      public Builder setExceptionClassNameBytes(
4593          com.google.protobuf.ByteString value) {
4594        if (value == null) {
4595    throw new NullPointerException();
4596  }
4597  bitField0_ |= 0x00000008;
4598        exceptionClassName_ = value;
4599        onChanged();
4600        return this;
4601      }
4602
4603      // optional string errorMsg = 5;
4604      private java.lang.Object errorMsg_ = "";
4605      /**
4606       * <code>optional string errorMsg = 5;</code>
4607       *
4608       * <pre>
4609       * if request fails, often contains strack trace
4610       * </pre>
4611       */
4612      public boolean hasErrorMsg() {
4613        return ((bitField0_ & 0x00000010) == 0x00000010);
4614      }
4615      /**
4616       * <code>optional string errorMsg = 5;</code>
4617       *
4618       * <pre>
4619       * if request fails, often contains strack trace
4620       * </pre>
4621       */
4622      public java.lang.String getErrorMsg() {
4623        java.lang.Object ref = errorMsg_;
4624        if (!(ref instanceof java.lang.String)) {
4625          java.lang.String s = ((com.google.protobuf.ByteString) ref)
4626              .toStringUtf8();
4627          errorMsg_ = s;
4628          return s;
4629        } else {
4630          return (java.lang.String) ref;
4631        }
4632      }
4633      /**
4634       * <code>optional string errorMsg = 5;</code>
4635       *
4636       * <pre>
4637       * if request fails, often contains strack trace
4638       * </pre>
4639       */
4640      public com.google.protobuf.ByteString
4641          getErrorMsgBytes() {
4642        java.lang.Object ref = errorMsg_;
4643        if (ref instanceof String) {
4644          com.google.protobuf.ByteString b = 
4645              com.google.protobuf.ByteString.copyFromUtf8(
4646                  (java.lang.String) ref);
4647          errorMsg_ = b;
4648          return b;
4649        } else {
4650          return (com.google.protobuf.ByteString) ref;
4651        }
4652      }
4653      /**
4654       * <code>optional string errorMsg = 5;</code>
4655       *
4656       * <pre>
4657       * if request fails, often contains strack trace
4658       * </pre>
4659       */
4660      public Builder setErrorMsg(
4661          java.lang.String value) {
4662        if (value == null) {
4663    throw new NullPointerException();
4664  }
4665  bitField0_ |= 0x00000010;
4666        errorMsg_ = value;
4667        onChanged();
4668        return this;
4669      }
4670      /**
4671       * <code>optional string errorMsg = 5;</code>
4672       *
4673       * <pre>
4674       * if request fails, often contains strack trace
4675       * </pre>
4676       */
4677      public Builder clearErrorMsg() {
4678        bitField0_ = (bitField0_ & ~0x00000010);
4679        errorMsg_ = getDefaultInstance().getErrorMsg();
4680        onChanged();
4681        return this;
4682      }
4683      /**
4684       * <code>optional string errorMsg = 5;</code>
4685       *
4686       * <pre>
4687       * if request fails, often contains strack trace
4688       * </pre>
4689       */
4690      public Builder setErrorMsgBytes(
4691          com.google.protobuf.ByteString value) {
4692        if (value == null) {
4693    throw new NullPointerException();
4694  }
4695  bitField0_ |= 0x00000010;
4696        errorMsg_ = value;
4697        onChanged();
4698        return this;
4699      }
4700
4701      // optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;
4702      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
4703      /**
4704       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
4705       *
4706       * <pre>
4707       * in case of error
4708       * </pre>
4709       */
4710      public boolean hasErrorDetail() {
4711        return ((bitField0_ & 0x00000020) == 0x00000020);
4712      }
4713      /**
4714       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
4715       *
4716       * <pre>
4717       * in case of error
4718       * </pre>
4719       */
4720      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto getErrorDetail() {
4721        return errorDetail_;
4722      }
4723      /**
4724       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
4725       *
4726       * <pre>
4727       * in case of error
4728       * </pre>
4729       */
4730      public Builder setErrorDetail(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto value) {
4731        if (value == null) {
4732          throw new NullPointerException();
4733        }
4734        bitField0_ |= 0x00000020;
4735        errorDetail_ = value;
4736        onChanged();
4737        return this;
4738      }
4739      /**
4740       * <code>optional .hadoop.common.RpcResponseHeaderProto.RpcErrorCodeProto errorDetail = 6;</code>
4741       *
4742       * <pre>
4743       * in case of error
4744       * </pre>
4745       */
4746      public Builder clearErrorDetail() {
4747        bitField0_ = (bitField0_ & ~0x00000020);
4748        errorDetail_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto.ERROR_APPLICATION;
4749        onChanged();
4750        return this;
4751      }
4752
4753      // optional bytes clientId = 7;
4754      private com.google.protobuf.ByteString clientId_ = com.google.protobuf.ByteString.EMPTY;
4755      /**
4756       * <code>optional bytes clientId = 7;</code>
4757       *
4758       * <pre>
4759       * Globally unique client ID
4760       * </pre>
4761       */
4762      public boolean hasClientId() {
4763        return ((bitField0_ & 0x00000040) == 0x00000040);
4764      }
4765      /**
4766       * <code>optional bytes clientId = 7;</code>
4767       *
4768       * <pre>
4769       * Globally unique client ID
4770       * </pre>
4771       */
4772      public com.google.protobuf.ByteString getClientId() {
4773        return clientId_;
4774      }
4775      /**
4776       * <code>optional bytes clientId = 7;</code>
4777       *
4778       * <pre>
4779       * Globally unique client ID
4780       * </pre>
4781       */
4782      public Builder setClientId(com.google.protobuf.ByteString value) {
4783        if (value == null) {
4784    throw new NullPointerException();
4785  }
4786  bitField0_ |= 0x00000040;
4787        clientId_ = value;
4788        onChanged();
4789        return this;
4790      }
4791      /**
4792       * <code>optional bytes clientId = 7;</code>
4793       *
4794       * <pre>
4795       * Globally unique client ID
4796       * </pre>
4797       */
4798      public Builder clearClientId() {
4799        bitField0_ = (bitField0_ & ~0x00000040);
4800        clientId_ = getDefaultInstance().getClientId();
4801        onChanged();
4802        return this;
4803      }
4804
4805      // optional sint32 retryCount = 8 [default = -1];
4806      private int retryCount_ = -1;
4807      /**
4808       * <code>optional sint32 retryCount = 8 [default = -1];</code>
4809       */
4810      public boolean hasRetryCount() {
4811        return ((bitField0_ & 0x00000080) == 0x00000080);
4812      }
4813      /**
4814       * <code>optional sint32 retryCount = 8 [default = -1];</code>
4815       */
4816      public int getRetryCount() {
4817        return retryCount_;
4818      }
4819      /**
4820       * <code>optional sint32 retryCount = 8 [default = -1];</code>
4821       */
4822      public Builder setRetryCount(int value) {
4823        bitField0_ |= 0x00000080;
4824        retryCount_ = value;
4825        onChanged();
4826        return this;
4827      }
4828      /**
4829       * <code>optional sint32 retryCount = 8 [default = -1];</code>
4830       */
4831      public Builder clearRetryCount() {
4832        bitField0_ = (bitField0_ & ~0x00000080);
4833        retryCount_ = -1;
4834        onChanged();
4835        return this;
4836      }
4837
4838      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcResponseHeaderProto)
4839    }
4840
    // Creates the shared default instance via the no-init constructor, then
    // populates its fields with their proto defaults.
    static {
      defaultInstance = new RpcResponseHeaderProto(true);
      defaultInstance.initFields();
    }
4845
4846    // @@protoc_insertion_point(class_scope:hadoop.common.RpcResponseHeaderProto)
4847  }
4848
  /**
   * Read-only accessor interface for {@code hadoop.common.RpcSaslProto},
   * implemented by both the message class and its builder.
   */
  public interface RpcSaslProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 version = 1;
    /**
     * <code>optional uint32 version = 1;</code>
     */
    boolean hasVersion();
    /**
     * <code>optional uint32 version = 1;</code>
     */
    int getVersion();

    // required .hadoop.common.RpcSaslProto.SaslState state = 2;
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    boolean hasState();
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState();

    // optional bytes token = 3;
    /**
     * <code>optional bytes token = 3;</code>
     */
    boolean hasToken();
    /**
     * <code>optional bytes token = 3;</code>
     */
    com.google.protobuf.ByteString getToken();

    // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> 
        getAuthsList();
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index);
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    int getAuthsCount();
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
        getAuthsOrBuilderList();
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
        int index);
  }
4907  /**
4908   * Protobuf type {@code hadoop.common.RpcSaslProto}
4909   */
4910  public static final class RpcSaslProto extends
4911      com.google.protobuf.GeneratedMessage
4912      implements RpcSaslProtoOrBuilder {
    // Use RpcSaslProto.newBuilder() to construct.
    private RpcSaslProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the shared default instance: skips field
    // initialization (done later via initFields()) and installs an empty
    // unknown-field set.
    private RpcSaslProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4919
    // Singleton default instance; assigned in the class static initializer.
    private static final RpcSaslProto defaultInstance;
    /** @return the shared immutable default instance of this message type. */
    public static RpcSaslProto getDefaultInstance() {
      return defaultInstance;
    }

    /** @return the shared immutable default instance (instance-method form). */
    public RpcSaslProto getDefaultInstanceForType() {
      return defaultInstance;
    }
4928
    // Fields seen on the wire that this generated class does not recognize;
    // preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-parsing constructor: reads tag/value pairs from {@code input}
     * until end of stream (tag 0), routing unrecognized fields into
     * {@code unknownFields}.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on malformed
     *         input; the partially parsed message is attached to the exception
     */
    private RpcSaslProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the default label precedes some case labels; Java matches
          // switch cases by value, so this ordering does not affect dispatch.
          switch (tag) {
            case 0:
              // Tag 0 signals end of the stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (version), varint.
              bitField0_ |= 0x00000001;
              version_ = input.readUInt32();
              break;
            }
            case 16: {
              // Field 2 (state), enum; unrecognized enum numbers are kept as
              // unknown fields rather than dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                state_ = value;
              }
              break;
            }
            case 26: {
              // Field 3 (token), length-delimited bytes.
              bitField0_ |= 0x00000004;
              token_ = input.readBytes();
              break;
            }
            case 34: {
              // Field 4 (auths), repeated message: lazily allocate the list
              // on first element.
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>();
                mutable_bitField0_ |= 0x00000008;
              }
              auths_.add(input.readMessage(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on failure: seal the repeated list and the unknown-field
        // set so the (possibly partial) message is immutable.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          auths_ = java.util.Collections.unmodifiableList(auths_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** @return the protobuf descriptor for {@code hadoop.common.RpcSaslProto}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
    }

    // Supplies the reflection accessor table binding this message type to its
    // builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
    }
5013
5014    public static com.google.protobuf.Parser<RpcSaslProto> PARSER =
5015        new com.google.protobuf.AbstractParser<RpcSaslProto>() {
5016      public RpcSaslProto parsePartialFrom(
5017          com.google.protobuf.CodedInputStream input,
5018          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5019          throws com.google.protobuf.InvalidProtocolBufferException {
5020        return new RpcSaslProto(input, extensionRegistry);
5021      }
5022    };
5023
5024    @java.lang.Override
5025    public com.google.protobuf.Parser<RpcSaslProto> getParserForType() {
5026      return PARSER;
5027    }
5028
5029    /**
5030     * Protobuf enum {@code hadoop.common.RpcSaslProto.SaslState}
5031     */
5032    public enum SaslState
5033        implements com.google.protobuf.ProtocolMessageEnum {
5034      /**
5035       * <code>SUCCESS = 0;</code>
5036       */
5037      SUCCESS(0, 0),
5038      /**
5039       * <code>NEGOTIATE = 1;</code>
5040       */
5041      NEGOTIATE(1, 1),
5042      /**
5043       * <code>INITIATE = 2;</code>
5044       */
5045      INITIATE(2, 2),
5046      /**
5047       * <code>CHALLENGE = 3;</code>
5048       */
5049      CHALLENGE(3, 3),
5050      /**
5051       * <code>RESPONSE = 4;</code>
5052       */
5053      RESPONSE(4, 4),
5054      /**
5055       * <code>WRAP = 5;</code>
5056       */
5057      WRAP(5, 5),
5058      ;
5059
5060      /**
5061       * <code>SUCCESS = 0;</code>
5062       */
5063      public static final int SUCCESS_VALUE = 0;
5064      /**
5065       * <code>NEGOTIATE = 1;</code>
5066       */
5067      public static final int NEGOTIATE_VALUE = 1;
5068      /**
5069       * <code>INITIATE = 2;</code>
5070       */
5071      public static final int INITIATE_VALUE = 2;
5072      /**
5073       * <code>CHALLENGE = 3;</code>
5074       */
5075      public static final int CHALLENGE_VALUE = 3;
5076      /**
5077       * <code>RESPONSE = 4;</code>
5078       */
5079      public static final int RESPONSE_VALUE = 4;
5080      /**
5081       * <code>WRAP = 5;</code>
5082       */
5083      public static final int WRAP_VALUE = 5;
5084
5085
5086      public final int getNumber() { return value; }
5087
5088      public static SaslState valueOf(int value) {
5089        switch (value) {
5090          case 0: return SUCCESS;
5091          case 1: return NEGOTIATE;
5092          case 2: return INITIATE;
5093          case 3: return CHALLENGE;
5094          case 4: return RESPONSE;
5095          case 5: return WRAP;
5096          default: return null;
5097        }
5098      }
5099
5100      public static com.google.protobuf.Internal.EnumLiteMap<SaslState>
5101          internalGetValueMap() {
5102        return internalValueMap;
5103      }
5104      private static com.google.protobuf.Internal.EnumLiteMap<SaslState>
5105          internalValueMap =
5106            new com.google.protobuf.Internal.EnumLiteMap<SaslState>() {
5107              public SaslState findValueByNumber(int number) {
5108                return SaslState.valueOf(number);
5109              }
5110            };
5111
5112      public final com.google.protobuf.Descriptors.EnumValueDescriptor
5113          getValueDescriptor() {
5114        return getDescriptor().getValues().get(index);
5115      }
5116      public final com.google.protobuf.Descriptors.EnumDescriptor
5117          getDescriptorForType() {
5118        return getDescriptor();
5119      }
5120      public static final com.google.protobuf.Descriptors.EnumDescriptor
5121          getDescriptor() {
5122        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDescriptor().getEnumTypes().get(0);
5123      }
5124
5125      private static final SaslState[] VALUES = values();
5126
5127      public static SaslState valueOf(
5128          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
5129        if (desc.getType() != getDescriptor()) {
5130          throw new java.lang.IllegalArgumentException(
5131            "EnumValueDescriptor is not for this type.");
5132        }
5133        return VALUES[desc.getIndex()];
5134      }
5135
5136      private final int index;
5137      private final int value;
5138
5139      private SaslState(int index, int value) {
5140        this.index = index;
5141        this.value = value;
5142      }
5143
5144      // @@protoc_insertion_point(enum_scope:hadoop.common.RpcSaslProto.SaslState)
5145    }
5146
    /**
     * Accessor contract shared by the {@code SaslAuth} message and its
     * Builder: a presence check plus String and raw-UTF-8-bytes views for
     * each string field, and a ByteString view of the challenge field.
     */
    public interface SaslAuthOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required string method = 1;
      /**
       * <code>required string method = 1;</code>
       */
      boolean hasMethod();
      /**
       * <code>required string method = 1;</code>
       */
      java.lang.String getMethod();
      /**
       * <code>required string method = 1;</code>
       */
      com.google.protobuf.ByteString
          getMethodBytes();

      // required string mechanism = 2;
      /**
       * <code>required string mechanism = 2;</code>
       */
      boolean hasMechanism();
      /**
       * <code>required string mechanism = 2;</code>
       */
      java.lang.String getMechanism();
      /**
       * <code>required string mechanism = 2;</code>
       */
      com.google.protobuf.ByteString
          getMechanismBytes();

      // optional string protocol = 3;
      /**
       * <code>optional string protocol = 3;</code>
       */
      boolean hasProtocol();
      /**
       * <code>optional string protocol = 3;</code>
       */
      java.lang.String getProtocol();
      /**
       * <code>optional string protocol = 3;</code>
       */
      com.google.protobuf.ByteString
          getProtocolBytes();

      // optional string serverId = 4;
      /**
       * <code>optional string serverId = 4;</code>
       */
      boolean hasServerId();
      /**
       * <code>optional string serverId = 4;</code>
       */
      java.lang.String getServerId();
      /**
       * <code>optional string serverId = 4;</code>
       */
      com.google.protobuf.ByteString
          getServerIdBytes();

      // optional bytes challenge = 5;
      /**
       * <code>optional bytes challenge = 5;</code>
       */
      boolean hasChallenge();
      /**
       * <code>optional bytes challenge = 5;</code>
       */
      com.google.protobuf.ByteString getChallenge();
    }
5220    /**
5221     * Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
5222     */
5223    public static final class SaslAuth extends
5224        com.google.protobuf.GeneratedMessage
5225        implements SaslAuthOrBuilder {
      // Use SaslAuth.newBuilder() to construct.
      private SaslAuth(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        // Capture the builder's unknown fields so that fields written by a
        // newer schema version survive a round trip.
        this.unknownFields = builder.getUnknownFields();
      }
      // Constructor for the shared default instance; field values are
      // presumably assigned by a static initializer later in this class
      // (not visible here).
      private SaslAuth(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Immutable singleton returned whenever an "empty" SaslAuth is needed.
      private static final SaslAuth defaultInstance;
      public static SaslAuth getDefaultInstance() {
        return defaultInstance;
      }

      public SaslAuth getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields present on the wire but absent from this message's schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-format parsing constructor, invoked via PARSER.parsePartialFrom.
       * String fields are stored as raw ByteStrings here and decoded lazily
       * by their getters; bitField0_ records which fields were present.
       */
      private SaslAuth(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // Generated artifact: unused for this message (no repeated fields).
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            // tag = (field number << 3) | wire type.
            int tag = input.readTag();
            switch (tag) {
              case 0:  // tag 0 marks end of input
                done = true;
                break;
              // NOTE: the default label precedes the numbered cases; Java
              // switch semantics make label order irrelevant.
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {  // field 1 (method), length-delimited
                bitField0_ |= 0x00000001;
                method_ = input.readBytes();
                break;
              }
              case 18: {  // field 2 (mechanism), length-delimited
                bitField0_ |= 0x00000002;
                mechanism_ = input.readBytes();
                break;
              }
              case 26: {  // field 3 (protocol), length-delimited
                bitField0_ |= 0x00000004;
                protocol_ = input.readBytes();
                break;
              }
              case 34: {  // field 4 (serverId), length-delimited
                bitField0_ |= 0x00000008;
                serverId_ = input.readBytes();
                break;
              }
              case 42: {  // field 5 (challenge), length-delimited
                bitField0_ |= 0x00000010;
                challenge_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially parsed message for callers that want it.
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs on success and failure alike: freeze whatever was parsed.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
      }

      // Wires reflective field access to the generated descriptor tables.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
      }

      // Stateless parser singleton; each call builds a new SaslAuth via the
      // stream-parsing constructor above.
      public static com.google.protobuf.Parser<SaslAuth> PARSER =
          new com.google.protobuf.AbstractParser<SaslAuth>() {
        public SaslAuth parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new SaslAuth(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<SaslAuth> getParserForType() {
        return PARSER;
      }
5334
      // Presence bitmask: bit (i) set means field (i+1) was explicitly set.
      private int bitField0_;
      // required string method = 1;
      public static final int METHOD_FIELD_NUMBER = 1;
      // Holds either a String or a ByteString; converted lazily in either
      // direction by the getters below, which cache the converted form.
      private java.lang.Object method_;
      /**
       * <code>required string method = 1;</code>
       */
      public boolean hasMethod() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string method = 1;</code>
       */
      public java.lang.String getMethod() {
        java.lang.Object ref = method_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes are valid UTF-8,
          // preserving the original bytes for re-serialization otherwise.
          if (bs.isValidUtf8()) {
            method_ = s;
          }
          return s;
        }
      }
      /**
       * <code>required string method = 1;</code>
       */
      public com.google.protobuf.ByteString
          getMethodBytes() {
        java.lang.Object ref = method_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          // Cache the encoded form for subsequent calls.
          method_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
5378
      // required string mechanism = 2;
      // Same lazy String/ByteString caching pattern as the method field.
      public static final int MECHANISM_FIELD_NUMBER = 2;
      private java.lang.Object mechanism_;
      /**
       * <code>required string mechanism = 2;</code>
       */
      public boolean hasMechanism() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required string mechanism = 2;</code>
       */
      public java.lang.String getMechanism() {
        java.lang.Object ref = mechanism_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            mechanism_ = s;
          }
          return s;
        }
      }
      /**
       * <code>required string mechanism = 2;</code>
       */
      public com.google.protobuf.ByteString
          getMechanismBytes() {
        java.lang.Object ref = mechanism_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          mechanism_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
5421
      // optional string protocol = 3;
      // Same lazy String/ByteString caching pattern as the method field.
      public static final int PROTOCOL_FIELD_NUMBER = 3;
      private java.lang.Object protocol_;
      /**
       * <code>optional string protocol = 3;</code>
       */
      public boolean hasProtocol() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string protocol = 3;</code>
       */
      public java.lang.String getProtocol() {
        java.lang.Object ref = protocol_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            protocol_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string protocol = 3;</code>
       */
      public com.google.protobuf.ByteString
          getProtocolBytes() {
        java.lang.Object ref = protocol_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          protocol_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
5464
      // optional string serverId = 4;
      // Same lazy String/ByteString caching pattern as the method field.
      public static final int SERVERID_FIELD_NUMBER = 4;
      private java.lang.Object serverId_;
      /**
       * <code>optional string serverId = 4;</code>
       */
      public boolean hasServerId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string serverId = 4;</code>
       */
      public java.lang.String getServerId() {
        java.lang.Object ref = serverId_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            serverId_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string serverId = 4;</code>
       */
      public com.google.protobuf.ByteString
          getServerIdBytes() {
        java.lang.Object ref = serverId_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          serverId_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
5507
5508      // optional bytes challenge = 5;
5509      public static final int CHALLENGE_FIELD_NUMBER = 5;
5510      private com.google.protobuf.ByteString challenge_;
5511      /**
5512       * <code>optional bytes challenge = 5;</code>
5513       */
5514      public boolean hasChallenge() {
5515        return ((bitField0_ & 0x00000010) == 0x00000010);
5516      }
5517      /**
5518       * <code>optional bytes challenge = 5;</code>
5519       */
5520      public com.google.protobuf.ByteString getChallenge() {
5521        return challenge_;
5522      }
5523
      // Sets every field to its proto default; called by the parsing
      // constructor and for the default instance.
      private void initFields() {
        method_ = "";
        mechanism_ = "";
        protocol_ = "";
        serverId_ = "";
        challenge_ = com.google.protobuf.ByteString.EMPTY;
      }
      // Memoized initialization state: -1 unknown, 0 false, 1 true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // method and mechanism are declared 'required' in RpcHeader.proto.
        if (!hasMethod()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (!hasMechanism()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }
5547
      /**
       * Serializes the set fields in field-number order, followed by any
       * unknown fields. getSerializedSize() is invoked first so the memoized
       * size is computed before writing (protoc-generated convention).
       */
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, getMethodBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getMechanismBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBytes(3, getProtocolBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          output.writeBytes(4, getServerIdBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          output.writeBytes(5, challenge_);
        }
        getUnknownFields().writeTo(output);
      }

      // Cached wire size; -1 means not yet computed.
      private int memoizedSerializedSize = -1;
      // Sums the encoded size of each present field plus unknown fields.
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getMethodBytes());
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getMechanismBytes());
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, getProtocolBytes());
        }
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(4, getServerIdBytes());
        }
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(5, challenge_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
5599
      private static final long serialVersionUID = 0L;
      // Java-serialization hook; defers to the GeneratedMessage base class.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
5606
      /**
       * Field-by-field equality: presence flags must match and, when a field
       * is present, its value must match; unknown fields participate too.
       */
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
         return true;
        }
        if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)) {
          return super.equals(obj);
        }
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) obj;

        boolean result = true;
        result = result && (hasMethod() == other.hasMethod());
        if (hasMethod()) {
          result = result && getMethod()
              .equals(other.getMethod());
        }
        result = result && (hasMechanism() == other.hasMechanism());
        if (hasMechanism()) {
          result = result && getMechanism()
              .equals(other.getMechanism());
        }
        result = result && (hasProtocol() == other.hasProtocol());
        if (hasProtocol()) {
          result = result && getProtocol()
              .equals(other.getProtocol());
        }
        result = result && (hasServerId() == other.hasServerId());
        if (hasServerId()) {
          result = result && getServerId()
              .equals(other.getServerId());
        }
        result = result && (hasChallenge() == other.hasChallenge());
        if (hasChallenge()) {
          result = result && getChallenge()
              .equals(other.getChallenge());
        }
        result = result &&
            getUnknownFields().equals(other.getUnknownFields());
        return result;
      }
5647
      // Cached hash; 0 means "not yet computed" (a real hash of 0 would be
      // recomputed each call, which is harmless).
      private int memoizedHashCode = 0;
      /**
       * Mirrors equals(): mixes the descriptor, each present field (its tag
       * number then its value), and the unknown fields.
       */
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        if (hasMethod()) {
          hash = (37 * hash) + METHOD_FIELD_NUMBER;
          hash = (53 * hash) + getMethod().hashCode();
        }
        if (hasMechanism()) {
          hash = (37 * hash) + MECHANISM_FIELD_NUMBER;
          hash = (53 * hash) + getMechanism().hashCode();
        }
        if (hasProtocol()) {
          hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
          hash = (53 * hash) + getProtocol().hashCode();
        }
        if (hasServerId()) {
          hash = (37 * hash) + SERVERID_FIELD_NUMBER;
          hash = (53 * hash) + getServerId().hashCode();
        }
        if (hasChallenge()) {
          hash = (37 * hash) + CHALLENGE_FIELD_NUMBER;
          hash = (53 * hash) + getChallenge().hashCode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
        memoizedHashCode = hash;
        return hash;
      }
5680
      // Static parsing entry points; every overload delegates to PARSER.
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message.
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
5733
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Builder pre-populated with a copy of an existing message's fields.
      public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Framework hook: builder attached to a parent for change notification.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
5747      /**
5748       * Protobuf type {@code hadoop.common.RpcSaslProto.SaslAuth}
5749       */
5750      public static final class Builder extends
5751          com.google.protobuf.GeneratedMessage.Builder<Builder>
5752         implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
        }

        // Same reflective accessor table as the message class.
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder.class);
        }

        // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        private void maybeForceBuilderInitialization() {
          // Intentionally empty: no nested-message field builders to
          // force-initialize for this message type.
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          }
        }
        private static Builder create() {
          return new Builder();
        }
5782
        // Resets every field to its proto default and clears its presence bit.
        public Builder clear() {
          super.clear();
          method_ = "";
          bitField0_ = (bitField0_ & ~0x00000001);
          mechanism_ = "";
          bitField0_ = (bitField0_ & ~0x00000002);
          protocol_ = "";
          bitField0_ = (bitField0_ & ~0x00000004);
          serverId_ = "";
          bitField0_ = (bitField0_ & ~0x00000008);
          challenge_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000010);
          return this;
        }

        // Deep copy via a serialization-free round trip through buildPartial.
        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
        }

        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getDefaultInstanceForType() {
          return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance();
        }

        /**
         * Builds the message, failing fast when a required field is unset.
         *
         * @throws com.google.protobuf.UninitializedMessageException (via
         *         newUninitializedMessageException) if required fields are
         *         missing.
         */
        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth build() {
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }
5818
        /**
         * Builds without checking required fields: copies every field value
         * into the new message and re-derives its presence bitmask from the
         * builder's own bits.
         */
        public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth buildPartial() {
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          // Values are copied unconditionally; the bit alone records presence.
          result.method_ = method_;
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          result.mechanism_ = mechanism_;
          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
            to_bitField0_ |= 0x00000004;
          }
          result.protocol_ = protocol_;
          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
            to_bitField0_ |= 0x00000008;
          }
          result.serverId_ = serverId_;
          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
            to_bitField0_ |= 0x00000010;
          }
          result.challenge_ = challenge_;
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
5847
        // Dynamic dispatch: use the typed merge when possible, otherwise the
        // reflection-based merge in the superclass.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) {
            return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        /**
         * Field-wise merge: every field set in {@code other} overwrites this
         * builder's value; unknown fields are merged as well. Merging the
         * default instance is a no-op.
         */
        public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth other) {
          if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance()) return this;
          if (other.hasMethod()) {
            bitField0_ |= 0x00000001;
            // Copies the raw Object (String or ByteString) without decoding.
            method_ = other.method_;
            onChanged();
          }
          if (other.hasMechanism()) {
            bitField0_ |= 0x00000002;
            mechanism_ = other.mechanism_;
            onChanged();
          }
          if (other.hasProtocol()) {
            bitField0_ |= 0x00000004;
            protocol_ = other.protocol_;
            onChanged();
          }
          if (other.hasServerId()) {
            bitField0_ |= 0x00000008;
            serverId_ = other.serverId_;
            onChanged();
          }
          if (other.hasChallenge()) {
            setChallenge(other.getChallenge());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
5885
5886        public final boolean isInitialized() {
5887          if (!hasMethod()) {
5888            
5889            return false;
5890          }
5891          if (!hasMechanism()) {
5892            
5893            return false;
5894          }
5895          return true;
5896        }
5897
        /**
         * Parses a message from the stream and merges it into this builder.
         * On a parse failure, whatever was successfully parsed is still
         * merged (in the finally block) before the exception propagates.
         */
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Recover the partial message attached by the parsing constructor.
            parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence bits for this builder's fields: bit 0 = method, bit 1 =
        // mechanism, bit 2 = protocol, bit 3 = serverId, bit 4 = challenge.
        private int bitField0_;

        // required string method = 1;
        // Stored as Object so the value can be held lazily as either a
        // String or a ByteString, converting on first access.
        private java.lang.Object method_ = "";
        /**
         * <code>required string method = 1;</code>
         */
        public boolean hasMethod() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required string method = 1;</code>
         */
        public java.lang.String getMethod() {
          java.lang.Object ref = method_;
          if (!(ref instanceof java.lang.String)) {
            // Decode the cached ByteString and memoize the String form.
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            method_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>required string method = 1;</code>
         */
        public com.google.protobuf.ByteString
            getMethodBytes() {
          java.lang.Object ref = method_;
          if (ref instanceof String) {
            // Encode the cached String and memoize the ByteString form.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            method_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>required string method = 1;</code>
         */
        public Builder setMethod(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
          method_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required string method = 1;</code>
         */
        public Builder clearMethod() {
          bitField0_ = (bitField0_ & ~0x00000001);
          method_ = getDefaultInstance().getMethod();
          onChanged();
          return this;
        }
        /**
         * <code>required string method = 1;</code>
         */
        public Builder setMethodBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
          method_ = value;
          onChanged();
          return this;
        }
5990
        // required string mechanism = 2;
        // Lazily held as either String or ByteString; see method_ above.
        private java.lang.Object mechanism_ = "";
        /**
         * <code>required string mechanism = 2;</code>
         */
        public boolean hasMechanism() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        public java.lang.String getMechanism() {
          java.lang.Object ref = mechanism_;
          if (!(ref instanceof java.lang.String)) {
            // Decode the cached ByteString and memoize the String form.
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            mechanism_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        public com.google.protobuf.ByteString
            getMechanismBytes() {
          java.lang.Object ref = mechanism_;
          if (ref instanceof String) {
            // Encode the cached String and memoize the ByteString form.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            mechanism_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        public Builder setMechanism(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          mechanism_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        public Builder clearMechanism() {
          bitField0_ = (bitField0_ & ~0x00000002);
          mechanism_ = getDefaultInstance().getMechanism();
          onChanged();
          return this;
        }
        /**
         * <code>required string mechanism = 2;</code>
         */
        public Builder setMechanismBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          mechanism_ = value;
          onChanged();
          return this;
        }
6064
        // optional string protocol = 3;
        // Lazily held as either String or ByteString; see method_ above.
        private java.lang.Object protocol_ = "";
        /**
         * <code>optional string protocol = 3;</code>
         */
        public boolean hasProtocol() {
          return ((bitField0_ & 0x00000004) == 0x00000004);
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        public java.lang.String getProtocol() {
          java.lang.Object ref = protocol_;
          if (!(ref instanceof java.lang.String)) {
            // Decode the cached ByteString and memoize the String form.
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            protocol_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        public com.google.protobuf.ByteString
            getProtocolBytes() {
          java.lang.Object ref = protocol_;
          if (ref instanceof String) {
            // Encode the cached String and memoize the ByteString form.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            protocol_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        public Builder setProtocol(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          protocol_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        public Builder clearProtocol() {
          bitField0_ = (bitField0_ & ~0x00000004);
          protocol_ = getDefaultInstance().getProtocol();
          onChanged();
          return this;
        }
        /**
         * <code>optional string protocol = 3;</code>
         */
        public Builder setProtocolBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
          protocol_ = value;
          onChanged();
          return this;
        }
6138
        // optional string serverId = 4;
        // Lazily held as either String or ByteString; see method_ above.
        private java.lang.Object serverId_ = "";
        /**
         * <code>optional string serverId = 4;</code>
         */
        public boolean hasServerId() {
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        public java.lang.String getServerId() {
          java.lang.Object ref = serverId_;
          if (!(ref instanceof java.lang.String)) {
            // Decode the cached ByteString and memoize the String form.
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            serverId_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        public com.google.protobuf.ByteString
            getServerIdBytes() {
          java.lang.Object ref = serverId_;
          if (ref instanceof String) {
            // Encode the cached String and memoize the ByteString form.
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            serverId_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        public Builder setServerId(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          serverId_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        public Builder clearServerId() {
          bitField0_ = (bitField0_ & ~0x00000008);
          serverId_ = getDefaultInstance().getServerId();
          onChanged();
          return this;
        }
        /**
         * <code>optional string serverId = 4;</code>
         */
        public Builder setServerIdBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
          serverId_ = value;
          onChanged();
          return this;
        }
6212
        // optional bytes challenge = 5;
        // Raw SASL challenge/response bytes; defaults to the empty ByteString.
        private com.google.protobuf.ByteString challenge_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        public boolean hasChallenge() {
          return ((bitField0_ & 0x00000010) == 0x00000010);
        }
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        public com.google.protobuf.ByteString getChallenge() {
          return challenge_;
        }
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        public Builder setChallenge(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
          challenge_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bytes challenge = 5;</code>
         */
        public Builder clearChallenge() {
          bitField0_ = (bitField0_ & ~0x00000010);
          challenge_ = getDefaultInstance().getChallenge();
          onChanged();
          return this;
        }
6248
6249        // @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto.SaslAuth)
6250      }
6251
      // Class initializer: eagerly builds the shared SaslAuth default
      // instance returned by getDefaultInstance().
      static {
        defaultInstance = new SaslAuth(true);
        defaultInstance.initFields();
      }
6256
6257      // @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto.SaslAuth)
6258    }
6259
    // Presence bits for this message's fields: bit 0 = version,
    // bit 1 = state, bit 2 = token (auths is repeated, so no bit).
    private int bitField0_;
    // optional uint32 version = 1;
    public static final int VERSION_FIELD_NUMBER = 1;
    private int version_;
    /**
     * <code>optional uint32 version = 1;</code>
     */
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 version = 1;</code>
     */
    public int getVersion() {
      return version_;
    }

    // required .hadoop.common.RpcSaslProto.SaslState state = 2;
    public static final int STATE_FIELD_NUMBER = 2;
    private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_;
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
      return state_;
    }

    // optional bytes token = 3;
    public static final int TOKEN_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString token_;
    /**
     * <code>optional bytes token = 3;</code>
     */
    public boolean hasToken() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes token = 3;</code>
     */
    public com.google.protobuf.ByteString getToken() {
      return token_;
    }

    // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
    public static final int AUTHS_FIELD_NUMBER = 4;
    private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_;
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
      return auths_;
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
        getAuthsOrBuilderList() {
      return auths_;
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public int getAuthsCount() {
      return auths_.size();
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
      return auths_.get(index);
    }
    /**
     * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
     */
    public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
        int index) {
      return auths_.get(index);
    }
6344
    // Resets every field to its protobuf default value; called when
    // constructing the shared default instance.
    private void initFields() {
      version_ = 0;
      state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
      token_ = com.google.protobuf.ByteString.EMPTY;
      auths_ = java.util.Collections.emptyList();
    }
6351    private byte memoizedIsInitialized = -1;
6352    public final boolean isInitialized() {
6353      byte isInitialized = memoizedIsInitialized;
6354      if (isInitialized != -1) return isInitialized == 1;
6355
6356      if (!hasState()) {
6357        memoizedIsInitialized = 0;
6358        return false;
6359      }
6360      for (int i = 0; i < getAuthsCount(); i++) {
6361        if (!getAuths(i).isInitialized()) {
6362          memoizedIsInitialized = 0;
6363          return false;
6364        }
6365      }
6366      memoizedIsInitialized = 1;
6367      return true;
6368    }
6369
    /**
     * Serializes the set fields of this message, in field-number order,
     * followed by any unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Generated-code convention: compute the size up front so nested
      // message sizes are memoized before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, version_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, state_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, token_);
      }
      for (int i = 0; i < auths_.size(); i++) {
        output.writeMessage(4, auths_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
6387
    // Cached wire size: -1 until first computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and memoizes) the serialized byte size of this message,
     * including unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, version_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, state_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, token_);
      }
      for (int i = 0; i < auths_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, auths_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
6414
    private static final long serialVersionUID = 0L;
    // Delegates Java serialization to the superclass replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
6421
    /**
     * Field-by-field structural equality: presence flags, field values,
     * the repeated auths list, and unknown fields must all match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) obj;

      boolean result = true;
      result = result && (hasVersion() == other.hasVersion());
      if (hasVersion()) {
        result = result && (getVersion()
            == other.getVersion());
      }
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result && (hasToken() == other.hasToken());
      if (hasToken()) {
        result = result && getToken()
            .equals(other.getToken());
      }
      result = result && getAuthsList()
          .equals(other.getAuthsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
6454
    // Cached hash: 0 until first computed.
    private int memoizedHashCode = 0;
    /**
     * Memoized hash combining the descriptor, each present field (tagged by
     * its field number), and unknown fields.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getVersion();
      }
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      if (hasToken()) {
        hash = (37 * hash) + TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getToken().hashCode();
      }
      if (getAuthsCount() > 0) {
        hash = (37 * hash) + AUTHS_FIELD_NUMBER;
        hash = (53 * hash) + getAuthsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
6483
    // Static parse entry points; all delegate to the shared PARSER.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
6536
    // Builder factory methods: fresh builder, builder pre-populated from a
    // prototype message, and a builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
6550    /**
6551     * Protobuf type {@code hadoop.common.RpcSaslProto}
6552     */
6553    public static final class Builder extends
6554        com.google.protobuf.GeneratedMessage.Builder<Builder>
6555       implements org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProtoOrBuilder {
      // Descriptor for the RpcSaslProto message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
      }

      // Maps descriptor fields to the generated accessor methods.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.class, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates the nested-field builder when the runtime requires it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getAuthsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
6586
      /**
       * Resets every field to its default value and clears all presence bits.
       */
      public Builder clear() {
        super.clear();
        version_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
        bitField0_ = (bitField0_ & ~0x00000002);
        token_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        // Repeated field: clear either the plain list or the field builder.
        if (authsBuilder_ == null) {
          auths_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          authsBuilder_.clear();
        }
        return this;
      }
6603
      // Deep copy: a fresh builder merged from this builder's current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.internal_static_hadoop_common_RpcSaslProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance();
      }
6616
      /**
       * Builds the message, throwing if any required field is unset.
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto build() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
6624
      /**
       * Builds the message without checking required fields, copying the
       * builder's field values and presence bits into the new instance.
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto result = new org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.version_ = version_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.state_ = state_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.token_ = token_;
        // Repeated field: freeze the plain list (made immutable, ownership
        // transferred to the message) or build from the field builder.
        if (authsBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            auths_ = java.util.Collections.unmodifiableList(auths_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.auths_ = auths_;
        } else {
          result.auths_ = authsBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
6654
      // Dispatches to the typed merge when possible; otherwise falls back to
      // the generic reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
6663
      /**
       * Merges another RpcSaslProto into this builder: scalar fields present
       * in {@code other} overwrite, repeated auths are appended.
       */
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.getDefaultInstance()) return this;
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        if (other.hasState()) {
          setState(other.getState());
        }
        if (other.hasToken()) {
          setToken(other.getToken());
        }
        if (authsBuilder_ == null) {
          if (!other.auths_.isEmpty()) {
            if (auths_.isEmpty()) {
              // Our list is empty: share other's immutable list directly.
              auths_ = other.auths_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureAuthsIsMutable();
              auths_.addAll(other.auths_);
            }
            onChanged();
          }
        } else {
          if (!other.auths_.isEmpty()) {
            if (authsBuilder_.isEmpty()) {
              // Drop the empty field builder and adopt other's list; recreate
              // the builder only if the runtime always uses field builders.
              authsBuilder_.dispose();
              authsBuilder_ = null;
              auths_ = other.auths_;
              bitField0_ = (bitField0_ & ~0x00000008);
              authsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAuthsFieldBuilder() : null;
            } else {
              authsBuilder_.addAllMessages(other.auths_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
6704
6705      public final boolean isInitialized() {
6706        if (!hasState()) {
6707          
6708          return false;
6709        }
6710        for (int i = 0; i < getAuthsCount(); i++) {
6711          if (!getAuths(i).isInitialized()) {
6712            
6713            return false;
6714          }
6715        }
6716        return true;
6717      }
6718
      /**
       * Parses an RpcSaslProto from the stream and merges it into this
       * builder. If parsing fails part-way, whatever was successfully read
       * is still merged in (via e.getUnfinishedMessage()) before the
       * exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Preserve the partially-parsed message so the finally block can merge it.
          parsedMessage = (org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's fields: bit 0 = version,
      // bit 1 = state, bit 2 = token, bit 3 = auths list.
      private int bitField0_;

      // optional uint32 version = 1;
      private int version_ ;
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public int getVersion() {
        return version_;
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public Builder setVersion(int value) {
        bitField0_ |= 0x00000001;
        version_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 version = 1;</code>
       */
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        version_ = 0;
        onChanged();
        return this;
      }
6770
      // required .hadoop.common.RpcSaslProto.SaslState state = 2;
      private org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
      /**
       * Returns true if the required {@code state} field has been set.
       *
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      public boolean hasState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * Returns the current {@code state} value (defaults to SUCCESS).
       *
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState getState() {
        return state_;
      }
      /**
       * Sets the required {@code state} field; null is rejected because
       * protobuf enum fields may never hold null.
       *
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      public Builder setState(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        state_ = value;
        onChanged();
        return this;
      }
      /**
       * Resets {@code state} to its default (SUCCESS) and clears its
       * presence bit.
       *
       * <code>required .hadoop.common.RpcSaslProto.SaslState state = 2;</code>
       */
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        state_ = org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState.SUCCESS;
        onChanged();
        return this;
      }
6806
6807      // optional bytes token = 3;
6808      private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY;
6809      /**
6810       * <code>optional bytes token = 3;</code>
6811       */
6812      public boolean hasToken() {
6813        return ((bitField0_ & 0x00000004) == 0x00000004);
6814      }
6815      /**
6816       * <code>optional bytes token = 3;</code>
6817       */
6818      public com.google.protobuf.ByteString getToken() {
6819        return token_;
6820      }
6821      /**
6822       * <code>optional bytes token = 3;</code>
6823       */
6824      public Builder setToken(com.google.protobuf.ByteString value) {
6825        if (value == null) {
6826    throw new NullPointerException();
6827  }
6828  bitField0_ |= 0x00000004;
6829        token_ = value;
6830        onChanged();
6831        return this;
6832      }
6833      /**
6834       * <code>optional bytes token = 3;</code>
6835       */
6836      public Builder clearToken() {
6837        bitField0_ = (bitField0_ & ~0x00000004);
6838        token_ = getDefaultInstance().getToken();
6839        onChanged();
6840        return this;
6841      }
6842
6843      // repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;
6844      private java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> auths_ =
6845        java.util.Collections.emptyList();
6846      private void ensureAuthsIsMutable() {
6847        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
6848          auths_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth>(auths_);
6849          bitField0_ |= 0x00000008;
6850         }
6851      }
6852
6853      private com.google.protobuf.RepeatedFieldBuilder<
6854          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> authsBuilder_;
6855
6856      /**
6857       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6858       */
6859      public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> getAuthsList() {
6860        if (authsBuilder_ == null) {
6861          return java.util.Collections.unmodifiableList(auths_);
6862        } else {
6863          return authsBuilder_.getMessageList();
6864        }
6865      }
6866      /**
6867       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6868       */
6869      public int getAuthsCount() {
6870        if (authsBuilder_ == null) {
6871          return auths_.size();
6872        } else {
6873          return authsBuilder_.getCount();
6874        }
6875      }
6876      /**
6877       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6878       */
6879      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth getAuths(int index) {
6880        if (authsBuilder_ == null) {
6881          return auths_.get(index);
6882        } else {
6883          return authsBuilder_.getMessage(index);
6884        }
6885      }
6886      /**
6887       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6888       */
6889      public Builder setAuths(
6890          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
6891        if (authsBuilder_ == null) {
6892          if (value == null) {
6893            throw new NullPointerException();
6894          }
6895          ensureAuthsIsMutable();
6896          auths_.set(index, value);
6897          onChanged();
6898        } else {
6899          authsBuilder_.setMessage(index, value);
6900        }
6901        return this;
6902      }
6903      /**
6904       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6905       */
6906      public Builder setAuths(
6907          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
6908        if (authsBuilder_ == null) {
6909          ensureAuthsIsMutable();
6910          auths_.set(index, builderForValue.build());
6911          onChanged();
6912        } else {
6913          authsBuilder_.setMessage(index, builderForValue.build());
6914        }
6915        return this;
6916      }
6917      /**
6918       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6919       */
6920      public Builder addAuths(org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
6921        if (authsBuilder_ == null) {
6922          if (value == null) {
6923            throw new NullPointerException();
6924          }
6925          ensureAuthsIsMutable();
6926          auths_.add(value);
6927          onChanged();
6928        } else {
6929          authsBuilder_.addMessage(value);
6930        }
6931        return this;
6932      }
6933      /**
6934       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6935       */
6936      public Builder addAuths(
6937          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth value) {
6938        if (authsBuilder_ == null) {
6939          if (value == null) {
6940            throw new NullPointerException();
6941          }
6942          ensureAuthsIsMutable();
6943          auths_.add(index, value);
6944          onChanged();
6945        } else {
6946          authsBuilder_.addMessage(index, value);
6947        }
6948        return this;
6949      }
6950      /**
6951       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6952       */
6953      public Builder addAuths(
6954          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
6955        if (authsBuilder_ == null) {
6956          ensureAuthsIsMutable();
6957          auths_.add(builderForValue.build());
6958          onChanged();
6959        } else {
6960          authsBuilder_.addMessage(builderForValue.build());
6961        }
6962        return this;
6963      }
6964      /**
6965       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6966       */
6967      public Builder addAuths(
6968          int index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder builderForValue) {
6969        if (authsBuilder_ == null) {
6970          ensureAuthsIsMutable();
6971          auths_.add(index, builderForValue.build());
6972          onChanged();
6973        } else {
6974          authsBuilder_.addMessage(index, builderForValue.build());
6975        }
6976        return this;
6977      }
6978      /**
6979       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6980       */
6981      public Builder addAllAuths(
6982          java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth> values) {
6983        if (authsBuilder_ == null) {
6984          ensureAuthsIsMutable();
6985          super.addAll(values, auths_);
6986          onChanged();
6987        } else {
6988          authsBuilder_.addAllMessages(values);
6989        }
6990        return this;
6991      }
6992      /**
6993       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
6994       */
6995      public Builder clearAuths() {
6996        if (authsBuilder_ == null) {
6997          auths_ = java.util.Collections.emptyList();
6998          bitField0_ = (bitField0_ & ~0x00000008);
6999          onChanged();
7000        } else {
7001          authsBuilder_.clear();
7002        }
7003        return this;
7004      }
7005      /**
7006       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7007       */
7008      public Builder removeAuths(int index) {
7009        if (authsBuilder_ == null) {
7010          ensureAuthsIsMutable();
7011          auths_.remove(index);
7012          onChanged();
7013        } else {
7014          authsBuilder_.remove(index);
7015        }
7016        return this;
7017      }
7018      /**
7019       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7020       */
7021      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder getAuthsBuilder(
7022          int index) {
7023        return getAuthsFieldBuilder().getBuilder(index);
7024      }
7025      /**
7026       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7027       */
7028      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder getAuthsOrBuilder(
7029          int index) {
7030        if (authsBuilder_ == null) {
7031          return auths_.get(index);  } else {
7032          return authsBuilder_.getMessageOrBuilder(index);
7033        }
7034      }
7035      /**
7036       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7037       */
7038      public java.util.List<? extends org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
7039           getAuthsOrBuilderList() {
7040        if (authsBuilder_ != null) {
7041          return authsBuilder_.getMessageOrBuilderList();
7042        } else {
7043          return java.util.Collections.unmodifiableList(auths_);
7044        }
7045      }
7046      /**
7047       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7048       */
7049      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder() {
7050        return getAuthsFieldBuilder().addBuilder(
7051            org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
7052      }
7053      /**
7054       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7055       */
7056      public org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder addAuthsBuilder(
7057          int index) {
7058        return getAuthsFieldBuilder().addBuilder(
7059            index, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.getDefaultInstance());
7060      }
7061      /**
7062       * <code>repeated .hadoop.common.RpcSaslProto.SaslAuth auths = 4;</code>
7063       */
7064      public java.util.List<org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder> 
7065           getAuthsBuilderList() {
7066        return getAuthsFieldBuilder().getBuilderList();
7067      }
      /**
       * Lazily creates the RepeatedFieldBuilder for {@code auths}. Once
       * created it owns the elements (seeded from {@code auths_}, which is
       * then nulled) and all accessors route through it.
       */
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder> 
          getAuthsFieldBuilder() {
        if (authsBuilder_ == null) {
          authsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth.Builder, org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuthOrBuilder>(
                  auths_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          auths_ = null;
        }
        return authsBuilder_;
      }
7082
7083      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcSaslProto)
7084    }
7085
    // Eagerly build the singleton default instance (no-op "true" constructor,
    // then default field values) referenced by getDefaultInstance().
    static {
      defaultInstance = new RpcSaslProto(true);
      defaultInstance.initFields();
    }
7090
7091    // @@protoc_insertion_point(class_scope:hadoop.common.RpcSaslProto)
7092  }
7093
  // Per-message descriptors and reflective field-accessor tables for every
  // message declared in RpcHeader.proto. All of these are assigned by the
  // static initializer at the bottom of this file when the serialized file
  // descriptor is parsed.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RPCTraceInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RPCCallerContextProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcRequestHeaderProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcSaslProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable;
7124
  /**
   * Returns the file descriptor for {@code RpcHeader.proto}.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned once by the static initializer when descriptorData is parsed.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Parses the serialized FileDescriptorProto for RpcHeader.proto (embedded
  // below as escaped string literals — do not modify by hand) and wires up
  // the per-message descriptors and field-accessor tables declared above.
  static {
    java.lang.String[] descriptorData = {
      "\n\017RpcHeader.proto\022\rhadoop.common\"6\n\021RPCT" +
      "raceInfoProto\022\017\n\007traceId\030\001 \001(\003\022\020\n\010parent" +
      "Id\030\002 \001(\003\";\n\025RPCCallerContextProto\022\017\n\007con" +
      "text\030\001 \002(\t\022\021\n\tsignature\030\002 \001(\014\"\224\003\n\025RpcReq" +
      "uestHeaderProto\022,\n\007rpcKind\030\001 \001(\0162\033.hadoo" +
      "p.common.RpcKindProto\022B\n\005rpcOp\030\002 \001(\01623.h" +
      "adoop.common.RpcRequestHeaderProto.Opera" +
      "tionProto\022\016\n\006callId\030\003 \002(\021\022\020\n\010clientId\030\004 " +
      "\002(\014\022\026\n\nretryCount\030\005 \001(\021:\002-1\0223\n\ttraceInfo" +
      "\030\006 \001(\0132 .hadoop.common.RPCTraceInfoProto",
      "\022;\n\rcallerContext\030\007 \001(\0132$.hadoop.common." +
      "RPCCallerContextProto\"]\n\016OperationProto\022" +
      "\024\n\020RPC_FINAL_PACKET\020\000\022\033\n\027RPC_CONTINUATIO" +
      "N_PACKET\020\001\022\030\n\024RPC_CLOSE_CONNECTION\020\002\"\312\005\n" +
      "\026RpcResponseHeaderProto\022\016\n\006callId\030\001 \002(\r\022" +
      "D\n\006status\030\002 \002(\01624.hadoop.common.RpcRespo" +
      "nseHeaderProto.RpcStatusProto\022\033\n\023serverI" +
      "pcVersionNum\030\003 \001(\r\022\032\n\022exceptionClassName" +
      "\030\004 \001(\t\022\020\n\010errorMsg\030\005 \001(\t\022L\n\013errorDetail\030" +
      "\006 \001(\01627.hadoop.common.RpcResponseHeaderP",
      "roto.RpcErrorCodeProto\022\020\n\010clientId\030\007 \001(\014" +
      "\022\026\n\nretryCount\030\010 \001(\021:\002-1\"3\n\016RpcStatusPro" +
      "to\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"\341\002" +
      "\n\021RpcErrorCodeProto\022\025\n\021ERROR_APPLICATION" +
      "\020\001\022\030\n\024ERROR_NO_SUCH_METHOD\020\002\022\032\n\026ERROR_NO" +
      "_SUCH_PROTOCOL\020\003\022\024\n\020ERROR_RPC_SERVER\020\004\022\036" +
      "\n\032ERROR_SERIALIZING_RESPONSE\020\005\022\036\n\032ERROR_" +
      "RPC_VERSION_MISMATCH\020\006\022\021\n\rFATAL_UNKNOWN\020" +
      "\n\022#\n\037FATAL_UNSUPPORTED_SERIALIZATION\020\013\022\034" +
      "\n\030FATAL_INVALID_RPC_HEADER\020\014\022\037\n\033FATAL_DE",
      "SERIALIZING_REQUEST\020\r\022\032\n\026FATAL_VERSION_M" +
      "ISMATCH\020\016\022\026\n\022FATAL_UNAUTHORIZED\020\017\"\335\002\n\014Rp" +
      "cSaslProto\022\017\n\007version\030\001 \001(\r\0224\n\005state\030\002 \002" +
      "(\0162%.hadoop.common.RpcSaslProto.SaslStat" +
      "e\022\r\n\005token\030\003 \001(\014\0223\n\005auths\030\004 \003(\0132$.hadoop" +
      ".common.RpcSaslProto.SaslAuth\032d\n\010SaslAut" +
      "h\022\016\n\006method\030\001 \002(\t\022\021\n\tmechanism\030\002 \002(\t\022\020\n\010" +
      "protocol\030\003 \001(\t\022\020\n\010serverId\030\004 \001(\t\022\021\n\tchal" +
      "lenge\030\005 \001(\014\"\\\n\tSaslState\022\013\n\007SUCCESS\020\000\022\r\n" +
      "\tNEGOTIATE\020\001\022\014\n\010INITIATE\020\002\022\r\n\tCHALLENGE\020",
      "\003\022\014\n\010RESPONSE\020\004\022\010\n\004WRAP\020\005*J\n\014RpcKindProt" +
      "o\022\017\n\013RPC_BUILTIN\020\000\022\020\n\014RPC_WRITABLE\020\001\022\027\n\023" +
      "RPC_PROTOCOL_BUFFER\020\002B4\n\036org.apache.hado" +
      "op.ipc.protobufB\017RpcHeaderProtos\240\001\001"
    };
    // Callback invoked by internalBuildGeneratedFileFrom once the root file
    // descriptor has been built; binds each message's descriptor (by declared
    // index) and its reflective accessor table.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_RPCTraceInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_RPCTraceInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RPCTraceInfoProto_descriptor,
              new java.lang.String[] { "TraceId", "ParentId", });
          internal_static_hadoop_common_RPCCallerContextProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_RPCCallerContextProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RPCCallerContextProto_descriptor,
              new java.lang.String[] { "Context", "Signature", });
          internal_static_hadoop_common_RpcRequestHeaderProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_RpcRequestHeaderProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcRequestHeaderProto_descriptor,
              new java.lang.String[] { "RpcKind", "RpcOp", "CallId", "ClientId", "RetryCount", "TraceInfo", "CallerContext", });
          internal_static_hadoop_common_RpcResponseHeaderProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcResponseHeaderProto_descriptor,
              new java.lang.String[] { "CallId", "Status", "ServerIpcVersionNum", "ExceptionClassName", "ErrorMsg", "ErrorDetail", "ClientId", "RetryCount", });
          internal_static_hadoop_common_RpcSaslProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_common_RpcSaslProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcSaslProto_descriptor,
              new java.lang.String[] { "Version", "State", "Token", "Auths", });
          internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor =
            internal_static_hadoop_common_RpcSaslProto_descriptor.getNestedTypes().get(0);
          internal_static_hadoop_common_RpcSaslProto_SaslAuth_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcSaslProto_SaslAuth_descriptor,
              new java.lang.String[] { "Method", "Mechanism", "Protocol", "ServerId", "Challenge", });
          // No extensions to register for this file.
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
7227
7228  // @@protoc_insertion_point(outer_class_scope)
7229}