001    // Generated by the protocol buffer compiler.  DO NOT EDIT!
002    // source: RpcPayloadHeader.proto
003    
004    package org.apache.hadoop.ipc.protobuf;
005    
006    public final class RpcPayloadHeaderProtos {
007      private RpcPayloadHeaderProtos() {}
  /**
   * Registers all protobuf extensions declared in this generated file with
   * the given registry. The body is empty because RpcPayloadHeader.proto
   * declares no extensions, so this is a no-op kept for API uniformity
   * with other generated files.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
011      public enum RpcKindProto
012          implements com.google.protobuf.ProtocolMessageEnum {
013        RPC_BUILTIN(0, 0),
014        RPC_WRITABLE(1, 1),
015        RPC_PROTOCOL_BUFFER(2, 2),
016        ;
017        
018        public static final int RPC_BUILTIN_VALUE = 0;
019        public static final int RPC_WRITABLE_VALUE = 1;
020        public static final int RPC_PROTOCOL_BUFFER_VALUE = 2;
021        
022        
023        public final int getNumber() { return value; }
024        
025        public static RpcKindProto valueOf(int value) {
026          switch (value) {
027            case 0: return RPC_BUILTIN;
028            case 1: return RPC_WRITABLE;
029            case 2: return RPC_PROTOCOL_BUFFER;
030            default: return null;
031          }
032        }
033        
034        public static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
035            internalGetValueMap() {
036          return internalValueMap;
037        }
038        private static com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>
039            internalValueMap =
040              new com.google.protobuf.Internal.EnumLiteMap<RpcKindProto>() {
041                public RpcKindProto findValueByNumber(int number) {
042                  return RpcKindProto.valueOf(number);
043                }
044              };
045        
046        public final com.google.protobuf.Descriptors.EnumValueDescriptor
047            getValueDescriptor() {
048          return getDescriptor().getValues().get(index);
049        }
050        public final com.google.protobuf.Descriptors.EnumDescriptor
051            getDescriptorForType() {
052          return getDescriptor();
053        }
054        public static final com.google.protobuf.Descriptors.EnumDescriptor
055            getDescriptor() {
056          return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.getDescriptor().getEnumTypes().get(0);
057        }
058        
059        private static final RpcKindProto[] VALUES = {
060          RPC_BUILTIN, RPC_WRITABLE, RPC_PROTOCOL_BUFFER, 
061        };
062        
063        public static RpcKindProto valueOf(
064            com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
065          if (desc.getType() != getDescriptor()) {
066            throw new java.lang.IllegalArgumentException(
067              "EnumValueDescriptor is not for this type.");
068          }
069          return VALUES[desc.getIndex()];
070        }
071        
072        private final int index;
073        private final int value;
074        
075        private RpcKindProto(int index, int value) {
076          this.index = index;
077          this.value = value;
078        }
079        
080        // @@protoc_insertion_point(enum_scope:hadoop.common.RpcKindProto)
081      }
082      
083      public enum RpcPayloadOperationProto
084          implements com.google.protobuf.ProtocolMessageEnum {
085        RPC_FINAL_PAYLOAD(0, 0),
086        RPC_CONTINUATION_PAYLOAD(1, 1),
087        RPC_CLOSE_CONNECTION(2, 2),
088        ;
089        
090        public static final int RPC_FINAL_PAYLOAD_VALUE = 0;
091        public static final int RPC_CONTINUATION_PAYLOAD_VALUE = 1;
092        public static final int RPC_CLOSE_CONNECTION_VALUE = 2;
093        
094        
095        public final int getNumber() { return value; }
096        
097        public static RpcPayloadOperationProto valueOf(int value) {
098          switch (value) {
099            case 0: return RPC_FINAL_PAYLOAD;
100            case 1: return RPC_CONTINUATION_PAYLOAD;
101            case 2: return RPC_CLOSE_CONNECTION;
102            default: return null;
103          }
104        }
105        
106        public static com.google.protobuf.Internal.EnumLiteMap<RpcPayloadOperationProto>
107            internalGetValueMap() {
108          return internalValueMap;
109        }
110        private static com.google.protobuf.Internal.EnumLiteMap<RpcPayloadOperationProto>
111            internalValueMap =
112              new com.google.protobuf.Internal.EnumLiteMap<RpcPayloadOperationProto>() {
113                public RpcPayloadOperationProto findValueByNumber(int number) {
114                  return RpcPayloadOperationProto.valueOf(number);
115                }
116              };
117        
118        public final com.google.protobuf.Descriptors.EnumValueDescriptor
119            getValueDescriptor() {
120          return getDescriptor().getValues().get(index);
121        }
122        public final com.google.protobuf.Descriptors.EnumDescriptor
123            getDescriptorForType() {
124          return getDescriptor();
125        }
126        public static final com.google.protobuf.Descriptors.EnumDescriptor
127            getDescriptor() {
128          return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.getDescriptor().getEnumTypes().get(1);
129        }
130        
131        private static final RpcPayloadOperationProto[] VALUES = {
132          RPC_FINAL_PAYLOAD, RPC_CONTINUATION_PAYLOAD, RPC_CLOSE_CONNECTION, 
133        };
134        
135        public static RpcPayloadOperationProto valueOf(
136            com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
137          if (desc.getType() != getDescriptor()) {
138            throw new java.lang.IllegalArgumentException(
139              "EnumValueDescriptor is not for this type.");
140          }
141          return VALUES[desc.getIndex()];
142        }
143        
144        private final int index;
145        private final int value;
146        
147        private RpcPayloadOperationProto(int index, int value) {
148          this.index = index;
149          this.value = value;
150        }
151        
152        // @@protoc_insertion_point(enum_scope:hadoop.common.RpcPayloadOperationProto)
153      }
154      
155      public enum RpcStatusProto
156          implements com.google.protobuf.ProtocolMessageEnum {
157        SUCCESS(0, 0),
158        ERROR(1, 1),
159        FATAL(2, 2),
160        ;
161        
162        public static final int SUCCESS_VALUE = 0;
163        public static final int ERROR_VALUE = 1;
164        public static final int FATAL_VALUE = 2;
165        
166        
167        public final int getNumber() { return value; }
168        
169        public static RpcStatusProto valueOf(int value) {
170          switch (value) {
171            case 0: return SUCCESS;
172            case 1: return ERROR;
173            case 2: return FATAL;
174            default: return null;
175          }
176        }
177        
178        public static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
179            internalGetValueMap() {
180          return internalValueMap;
181        }
182        private static com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>
183            internalValueMap =
184              new com.google.protobuf.Internal.EnumLiteMap<RpcStatusProto>() {
185                public RpcStatusProto findValueByNumber(int number) {
186                  return RpcStatusProto.valueOf(number);
187                }
188              };
189        
190        public final com.google.protobuf.Descriptors.EnumValueDescriptor
191            getValueDescriptor() {
192          return getDescriptor().getValues().get(index);
193        }
194        public final com.google.protobuf.Descriptors.EnumDescriptor
195            getDescriptorForType() {
196          return getDescriptor();
197        }
198        public static final com.google.protobuf.Descriptors.EnumDescriptor
199            getDescriptor() {
200          return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.getDescriptor().getEnumTypes().get(2);
201        }
202        
203        private static final RpcStatusProto[] VALUES = {
204          SUCCESS, ERROR, FATAL, 
205        };
206        
207        public static RpcStatusProto valueOf(
208            com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
209          if (desc.getType() != getDescriptor()) {
210            throw new java.lang.IllegalArgumentException(
211              "EnumValueDescriptor is not for this type.");
212          }
213          return VALUES[desc.getIndex()];
214        }
215        
216        private final int index;
217        private final int value;
218        
219        private RpcStatusProto(int index, int value) {
220          this.index = index;
221          this.value = value;
222        }
223        
224        // @@protoc_insertion_point(enum_scope:hadoop.common.RpcStatusProto)
225      }
226      
  /**
   * Accessor interface shared by {@code RpcPayloadHeaderProto} and its
   * {@code Builder}, so readers can accept either a built message or a
   * builder in progress. Each {@code hasX()} reports whether field X was
   * explicitly set; each {@code getX()} returns its current (or default)
   * value.
   */
  public interface RpcPayloadHeaderProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional .hadoop.common.RpcKindProto rpcKind = 1;
    boolean hasRpcKind();
    org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto getRpcKind();
    
    // optional .hadoop.common.RpcPayloadOperationProto rpcOp = 2;
    boolean hasRpcOp();
    org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto getRpcOp();
    
    // required uint32 callId = 3;
    boolean hasCallId();
    int getCallId();
  }
242      public static final class RpcPayloadHeaderProto extends
243          com.google.protobuf.GeneratedMessage
244          implements RpcPayloadHeaderProtoOrBuilder {
245        // Use RpcPayloadHeaderProto.newBuilder() to construct.
246        private RpcPayloadHeaderProto(Builder builder) {
247          super(builder);
248        }
249        private RpcPayloadHeaderProto(boolean noInit) {}
250        
251        private static final RpcPayloadHeaderProto defaultInstance;
252        public static RpcPayloadHeaderProto getDefaultInstance() {
253          return defaultInstance;
254        }
255        
256        public RpcPayloadHeaderProto getDefaultInstanceForType() {
257          return defaultInstance;
258        }
259        
260        public static final com.google.protobuf.Descriptors.Descriptor
261            getDescriptor() {
262          return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcPayloadHeaderProto_descriptor;
263        }
264        
265        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
266            internalGetFieldAccessorTable() {
267          return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcPayloadHeaderProto_fieldAccessorTable;
268        }
269        
270        private int bitField0_;
271        // optional .hadoop.common.RpcKindProto rpcKind = 1;
272        public static final int RPCKIND_FIELD_NUMBER = 1;
273        private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto rpcKind_;
274        public boolean hasRpcKind() {
275          return ((bitField0_ & 0x00000001) == 0x00000001);
276        }
277        public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto getRpcKind() {
278          return rpcKind_;
279        }
280        
281        // optional .hadoop.common.RpcPayloadOperationProto rpcOp = 2;
282        public static final int RPCOP_FIELD_NUMBER = 2;
283        private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto rpcOp_;
284        public boolean hasRpcOp() {
285          return ((bitField0_ & 0x00000002) == 0x00000002);
286        }
287        public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto getRpcOp() {
288          return rpcOp_;
289        }
290        
291        // required uint32 callId = 3;
292        public static final int CALLID_FIELD_NUMBER = 3;
293        private int callId_;
294        public boolean hasCallId() {
295          return ((bitField0_ & 0x00000004) == 0x00000004);
296        }
297        public int getCallId() {
298          return callId_;
299        }
300        
301        private void initFields() {
302          rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto.RPC_BUILTIN;
303          rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto.RPC_FINAL_PAYLOAD;
304          callId_ = 0;
305        }
306        private byte memoizedIsInitialized = -1;
307        public final boolean isInitialized() {
308          byte isInitialized = memoizedIsInitialized;
309          if (isInitialized != -1) return isInitialized == 1;
310          
311          if (!hasCallId()) {
312            memoizedIsInitialized = 0;
313            return false;
314          }
315          memoizedIsInitialized = 1;
316          return true;
317        }
318        
319        public void writeTo(com.google.protobuf.CodedOutputStream output)
320                            throws java.io.IOException {
321          getSerializedSize();
322          if (((bitField0_ & 0x00000001) == 0x00000001)) {
323            output.writeEnum(1, rpcKind_.getNumber());
324          }
325          if (((bitField0_ & 0x00000002) == 0x00000002)) {
326            output.writeEnum(2, rpcOp_.getNumber());
327          }
328          if (((bitField0_ & 0x00000004) == 0x00000004)) {
329            output.writeUInt32(3, callId_);
330          }
331          getUnknownFields().writeTo(output);
332        }
333        
334        private int memoizedSerializedSize = -1;
335        public int getSerializedSize() {
336          int size = memoizedSerializedSize;
337          if (size != -1) return size;
338        
339          size = 0;
340          if (((bitField0_ & 0x00000001) == 0x00000001)) {
341            size += com.google.protobuf.CodedOutputStream
342              .computeEnumSize(1, rpcKind_.getNumber());
343          }
344          if (((bitField0_ & 0x00000002) == 0x00000002)) {
345            size += com.google.protobuf.CodedOutputStream
346              .computeEnumSize(2, rpcOp_.getNumber());
347          }
348          if (((bitField0_ & 0x00000004) == 0x00000004)) {
349            size += com.google.protobuf.CodedOutputStream
350              .computeUInt32Size(3, callId_);
351          }
352          size += getUnknownFields().getSerializedSize();
353          memoizedSerializedSize = size;
354          return size;
355        }
356        
357        private static final long serialVersionUID = 0L;
358        @java.lang.Override
359        protected java.lang.Object writeReplace()
360            throws java.io.ObjectStreamException {
361          return super.writeReplace();
362        }
363        
364        @java.lang.Override
365        public boolean equals(final java.lang.Object obj) {
366          if (obj == this) {
367           return true;
368          }
369          if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto)) {
370            return super.equals(obj);
371          }
372          org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto) obj;
373          
374          boolean result = true;
375          result = result && (hasRpcKind() == other.hasRpcKind());
376          if (hasRpcKind()) {
377            result = result &&
378                (getRpcKind() == other.getRpcKind());
379          }
380          result = result && (hasRpcOp() == other.hasRpcOp());
381          if (hasRpcOp()) {
382            result = result &&
383                (getRpcOp() == other.getRpcOp());
384          }
385          result = result && (hasCallId() == other.hasCallId());
386          if (hasCallId()) {
387            result = result && (getCallId()
388                == other.getCallId());
389          }
390          result = result &&
391              getUnknownFields().equals(other.getUnknownFields());
392          return result;
393        }
394        
395        @java.lang.Override
396        public int hashCode() {
397          int hash = 41;
398          hash = (19 * hash) + getDescriptorForType().hashCode();
399          if (hasRpcKind()) {
400            hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
401            hash = (53 * hash) + hashEnum(getRpcKind());
402          }
403          if (hasRpcOp()) {
404            hash = (37 * hash) + RPCOP_FIELD_NUMBER;
405            hash = (53 * hash) + hashEnum(getRpcOp());
406          }
407          if (hasCallId()) {
408            hash = (37 * hash) + CALLID_FIELD_NUMBER;
409            hash = (53 * hash) + getCallId();
410          }
411          hash = (29 * hash) + getUnknownFields().hashCode();
412          return hash;
413        }
414        
415        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(
416            com.google.protobuf.ByteString data)
417            throws com.google.protobuf.InvalidProtocolBufferException {
418          return newBuilder().mergeFrom(data).buildParsed();
419        }
420        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(
421            com.google.protobuf.ByteString data,
422            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
423            throws com.google.protobuf.InvalidProtocolBufferException {
424          return newBuilder().mergeFrom(data, extensionRegistry)
425                   .buildParsed();
426        }
427        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(byte[] data)
428            throws com.google.protobuf.InvalidProtocolBufferException {
429          return newBuilder().mergeFrom(data).buildParsed();
430        }
431        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(
432            byte[] data,
433            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
434            throws com.google.protobuf.InvalidProtocolBufferException {
435          return newBuilder().mergeFrom(data, extensionRegistry)
436                   .buildParsed();
437        }
438        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(java.io.InputStream input)
439            throws java.io.IOException {
440          return newBuilder().mergeFrom(input).buildParsed();
441        }
442        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(
443            java.io.InputStream input,
444            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
445            throws java.io.IOException {
446          return newBuilder().mergeFrom(input, extensionRegistry)
447                   .buildParsed();
448        }
449        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseDelimitedFrom(java.io.InputStream input)
450            throws java.io.IOException {
451          Builder builder = newBuilder();
452          if (builder.mergeDelimitedFrom(input)) {
453            return builder.buildParsed();
454          } else {
455            return null;
456          }
457        }
458        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseDelimitedFrom(
459            java.io.InputStream input,
460            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
461            throws java.io.IOException {
462          Builder builder = newBuilder();
463          if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
464            return builder.buildParsed();
465          } else {
466            return null;
467          }
468        }
469        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(
470            com.google.protobuf.CodedInputStream input)
471            throws java.io.IOException {
472          return newBuilder().mergeFrom(input).buildParsed();
473        }
474        public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto parseFrom(
475            com.google.protobuf.CodedInputStream input,
476            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
477            throws java.io.IOException {
478          return newBuilder().mergeFrom(input, extensionRegistry)
479                   .buildParsed();
480        }
481        
482        public static Builder newBuilder() { return Builder.create(); }
483        public Builder newBuilderForType() { return newBuilder(); }
484        public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto prototype) {
485          return newBuilder().mergeFrom(prototype);
486        }
487        public Builder toBuilder() { return newBuilder(this); }
488        
489        @java.lang.Override
490        protected Builder newBuilderForType(
491            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
492          Builder builder = new Builder(parent);
493          return builder;
494        }
495        public static final class Builder extends
496            com.google.protobuf.GeneratedMessage.Builder<Builder>
497           implements org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProtoOrBuilder {
498          public static final com.google.protobuf.Descriptors.Descriptor
499              getDescriptor() {
500            return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcPayloadHeaderProto_descriptor;
501          }
502          
503          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
504              internalGetFieldAccessorTable() {
505            return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcPayloadHeaderProto_fieldAccessorTable;
506          }
507          
508          // Construct using org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto.newBuilder()
509          private Builder() {
510            maybeForceBuilderInitialization();
511          }
512          
513          private Builder(BuilderParent parent) {
514            super(parent);
515            maybeForceBuilderInitialization();
516          }
517          private void maybeForceBuilderInitialization() {
518            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
519            }
520          }
521          private static Builder create() {
522            return new Builder();
523          }
524          
525          public Builder clear() {
526            super.clear();
527            rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto.RPC_BUILTIN;
528            bitField0_ = (bitField0_ & ~0x00000001);
529            rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto.RPC_FINAL_PAYLOAD;
530            bitField0_ = (bitField0_ & ~0x00000002);
531            callId_ = 0;
532            bitField0_ = (bitField0_ & ~0x00000004);
533            return this;
534          }
535          
536          public Builder clone() {
537            return create().mergeFrom(buildPartial());
538          }
539          
540          public com.google.protobuf.Descriptors.Descriptor
541              getDescriptorForType() {
542            return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto.getDescriptor();
543          }
544          
545          public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto getDefaultInstanceForType() {
546            return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto.getDefaultInstance();
547          }
548          
549          public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto build() {
550            org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto result = buildPartial();
551            if (!result.isInitialized()) {
552              throw newUninitializedMessageException(result);
553            }
554            return result;
555          }
556          
557          private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto buildParsed()
558              throws com.google.protobuf.InvalidProtocolBufferException {
559            org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto result = buildPartial();
560            if (!result.isInitialized()) {
561              throw newUninitializedMessageException(
562                result).asInvalidProtocolBufferException();
563            }
564            return result;
565          }
566          
567          public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto buildPartial() {
568            org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto(this);
569            int from_bitField0_ = bitField0_;
570            int to_bitField0_ = 0;
571            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
572              to_bitField0_ |= 0x00000001;
573            }
574            result.rpcKind_ = rpcKind_;
575            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
576              to_bitField0_ |= 0x00000002;
577            }
578            result.rpcOp_ = rpcOp_;
579            if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
580              to_bitField0_ |= 0x00000004;
581            }
582            result.callId_ = callId_;
583            result.bitField0_ = to_bitField0_;
584            onBuilt();
585            return result;
586          }
587          
588          public Builder mergeFrom(com.google.protobuf.Message other) {
589            if (other instanceof org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto) {
590              return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto)other);
591            } else {
592              super.mergeFrom(other);
593              return this;
594            }
595          }
596          
597          public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto other) {
598            if (other == org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto.getDefaultInstance()) return this;
599            if (other.hasRpcKind()) {
600              setRpcKind(other.getRpcKind());
601            }
602            if (other.hasRpcOp()) {
603              setRpcOp(other.getRpcOp());
604            }
605            if (other.hasCallId()) {
606              setCallId(other.getCallId());
607            }
608            this.mergeUnknownFields(other.getUnknownFields());
609            return this;
610          }
611          
612          public final boolean isInitialized() {
613            if (!hasCallId()) {
614              
615              return false;
616            }
617            return true;
618          }
619          
620          public Builder mergeFrom(
621              com.google.protobuf.CodedInputStream input,
622              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
623              throws java.io.IOException {
624            com.google.protobuf.UnknownFieldSet.Builder unknownFields =
625              com.google.protobuf.UnknownFieldSet.newBuilder(
626                this.getUnknownFields());
627            while (true) {
628              int tag = input.readTag();
629              switch (tag) {
630                case 0:
631                  this.setUnknownFields(unknownFields.build());
632                  onChanged();
633                  return this;
634                default: {
635                  if (!parseUnknownField(input, unknownFields,
636                                         extensionRegistry, tag)) {
637                    this.setUnknownFields(unknownFields.build());
638                    onChanged();
639                    return this;
640                  }
641                  break;
642                }
643                case 8: {
644                  int rawValue = input.readEnum();
645                  org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto value = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto.valueOf(rawValue);
646                  if (value == null) {
647                    unknownFields.mergeVarintField(1, rawValue);
648                  } else {
649                    bitField0_ |= 0x00000001;
650                    rpcKind_ = value;
651                  }
652                  break;
653                }
654                case 16: {
655                  int rawValue = input.readEnum();
656                  org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto value = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto.valueOf(rawValue);
657                  if (value == null) {
658                    unknownFields.mergeVarintField(2, rawValue);
659                  } else {
660                    bitField0_ |= 0x00000002;
661                    rpcOp_ = value;
662                  }
663                  break;
664                }
665                case 24: {
666                  bitField0_ |= 0x00000004;
667                  callId_ = input.readUInt32();
668                  break;
669                }
670              }
671            }
672          }
673          
674          private int bitField0_;
675          
676          // optional .hadoop.common.RpcKindProto rpcKind = 1;
677          private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto.RPC_BUILTIN;
678          public boolean hasRpcKind() {
679            return ((bitField0_ & 0x00000001) == 0x00000001);
680          }
681          public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto getRpcKind() {
682            return rpcKind_;
683          }
684          public Builder setRpcKind(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto value) {
685            if (value == null) {
686              throw new NullPointerException();
687            }
688            bitField0_ |= 0x00000001;
689            rpcKind_ = value;
690            onChanged();
691            return this;
692          }
693          public Builder clearRpcKind() {
694            bitField0_ = (bitField0_ & ~0x00000001);
695            rpcKind_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcKindProto.RPC_BUILTIN;
696            onChanged();
697            return this;
698          }
699          
      // optional .hadoop.common.RpcPayloadOperationProto rpcOp = 2;
      // Builder-side storage for rpcOp; presence tracked by bit 0x2 of bitField0_.
      private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto.RPC_FINAL_PAYLOAD;
      // True iff rpcOp has been explicitly set on this builder.
      public boolean hasRpcOp() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      // Current rpcOp value; RPC_FINAL_PAYLOAD (the proto default) when unset.
      public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto getRpcOp() {
        return rpcOp_;
      }
      // Sets rpcOp and marks it present. Protobuf fields are null-hostile.
      public Builder setRpcOp(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        rpcOp_ = value;
        onChanged();
        return this;
      }
      // Restores rpcOp to its default and clears its presence bit.
      public Builder clearRpcOp() {
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcOp_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto.RPC_FINAL_PAYLOAD;
        onChanged();
        return this;
      }
723          
      // required uint32 callId = 3;
      // Builder-side storage for callId; presence tracked by bit 0x4 of bitField0_.
      // Required field: build() fails via isInitialized() if it is never set.
      private int callId_ ;
      // True iff callId has been explicitly set on this builder.
      public boolean hasCallId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      // Current callId value; 0 when unset.
      public int getCallId() {
        return callId_;
      }
      // Sets callId and marks it present.
      public Builder setCallId(int value) {
        bitField0_ |= 0x00000004;
        callId_ = value;
        onChanged();
        return this;
      }
      // Restores callId to 0 and clears its presence bit.
      public Builder clearCallId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        callId_ = 0;
        onChanged();
        return this;
      }
744          
745          // @@protoc_insertion_point(builder_scope:hadoop.common.RpcPayloadHeaderProto)
746        }
747        
    // Eagerly creates the shared default instance. The noInit constructor
    // skips normal construction; initFields() then sets all proto defaults.
    static {
      defaultInstance = new RpcPayloadHeaderProto(true);
      defaultInstance.initFields();
    }
752        
753        // @@protoc_insertion_point(class_scope:hadoop.common.RpcPayloadHeaderProto)
754      }
755      
  /**
   * Read-side contract implemented by both {@code RpcResponseHeaderProto}
   * (the immutable message) and its {@code Builder}. One has/get pair per
   * proto field. Generated by protoc; do not hand-edit.
   */
  public interface RpcResponseHeaderProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 callId = 1;
    boolean hasCallId();
    int getCallId();
    
    // required .hadoop.common.RpcStatusProto status = 2;
    boolean hasStatus();
    org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto getStatus();
    
    // optional uint32 serverIpcVersionNum = 3;
    boolean hasServerIpcVersionNum();
    int getServerIpcVersionNum();
  }
  /**
   * Protocol buffer message {@code hadoop.common.RpcResponseHeaderProto}.
   *
   * Fields (see the serialized descriptor at the bottom of this file):
   * required uint32 callId = 1; required RpcStatusProto status = 2;
   * optional uint32 serverIpcVersionNum = 3.
   *
   * Generated by protoc from RpcPayloadHeader.proto; do not hand-edit —
   * regenerate instead.
   */
  public static final class RpcResponseHeaderProto extends
      com.google.protobuf.GeneratedMessage
      implements RpcResponseHeaderProtoOrBuilder {
    // Use RpcResponseHeaderProto.newBuilder() to construct.
    private RpcResponseHeaderProto(Builder builder) {
      super(builder);
    }
    // Used only to create the singleton defaultInstance in the static
    // initializer below; fields are populated separately via initFields().
    private RpcResponseHeaderProto(boolean noInit) {}
    
    // Shared immutable default instance; all fields hold proto defaults.
    private static final RpcResponseHeaderProto defaultInstance;
    public static RpcResponseHeaderProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public RpcResponseHeaderProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
    }
    
    // Presence bits: 0x1 = callId, 0x2 = status, 0x4 = serverIpcVersionNum.
    private int bitField0_;
    // required uint32 callId = 1;
    public static final int CALLID_FIELD_NUMBER = 1;
    private int callId_;
    public boolean hasCallId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getCallId() {
      return callId_;
    }
    
    // required .hadoop.common.RpcStatusProto status = 2;
    public static final int STATUS_FIELD_NUMBER = 2;
    private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto status_;
    public boolean hasStatus() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto getStatus() {
      return status_;
    }
    
    // optional uint32 serverIpcVersionNum = 3;
    public static final int SERVERIPCVERSIONNUM_FIELD_NUMBER = 3;
    private int serverIpcVersionNum_;
    public boolean hasServerIpcVersionNum() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public int getServerIpcVersionNum() {
      return serverIpcVersionNum_;
    }
    
    // Resets all fields to their proto defaults (used for defaultInstance).
    private void initFields() {
      callId_ = 0;
      status_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto.SUCCESS;
      serverIpcVersionNum_ = 0;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      // Both required fields must be present for the message to be valid.
      if (!hasCallId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStatus()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    // Serializes only present fields, in field-number order, then any
    // unknown fields carried over from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, callId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, serverIpcVersionNum_);
      }
      getUnknownFields().writeTo(output);
    }
    
    // Memoized wire size: -1 until first computed; message is immutable so
    // the cached value never goes stale.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, callId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, serverIpcVersionNum_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    // Java serialization delegates to the protobuf representation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    // Field-by-field equality: each presence flag must match, and present
    // fields must compare equal; unknown fields are compared as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto other = (org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto) obj;
      
      boolean result = true;
      result = result && (hasCallId() == other.hasCallId());
      if (hasCallId()) {
        result = result && (getCallId()
            == other.getCallId());
      }
      result = result && (hasStatus() == other.hasStatus());
      if (hasStatus()) {
        result = result &&
            (getStatus() == other.getStatus());
      }
      result = result && (hasServerIpcVersionNum() == other.hasServerIpcVersionNum());
      if (hasServerIpcVersionNum()) {
        result = result && (getServerIpcVersionNum()
            == other.getServerIpcVersionNum());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    // Hash mixes the descriptor, each present field (tag number then value),
    // and the unknown-field set; constants are protoc's standard choices.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCallId()) {
        hash = (37 * hash) + CALLID_FIELD_NUMBER;
        hash = (53 * hash) + getCallId();
      }
      if (hasStatus()) {
        hash = (37 * hash) + STATUS_FIELD_NUMBER;
        // hashEnum is a protobuf-runtime helper — presumably hashes by enum
        // number for cross-version stability; confirm against the runtime.
        hash = (53 * hash) + hashEnum(getStatus());
      }
      if (hasServerIpcVersionNum()) {
        hash = (37 * hash) + SERVERIPCVERSIONNUM_FIELD_NUMBER;
        hash = (53 * hash) + getServerIpcVersionNum();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    // --- Static parse helpers: each delegates to a fresh Builder and then
    // --- buildParsed(), which converts missing-required-field errors into
    // --- InvalidProtocolBufferException.
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Reads a varint length prefix followed by the message; returns null on
    // clean end-of-stream (no message available).
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Builder pre-populated with a copy of the given message's fields.
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Mutable builder for {@code hadoop.common.RpcResponseHeaderProto}.
     * Field presence is tracked in bitField0_ with the same bit layout as
     * the message (0x1 callId, 0x2 status, 0x4 serverIpcVersionNum).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No sub-message field builders to force-create for this message type;
      // the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      // Resets every field to its proto default and clears all presence bits.
      public Builder clear() {
        super.clear();
        callId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        status_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto.SUCCESS;
        bitField0_ = (bitField0_ & ~0x00000002);
        serverIpcVersionNum_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto.getDescriptor();
      }
      
      public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto.getDefaultInstance();
      }
      
      // Builds the message; throws UninitializedMessageException if a
      // required field (callId or status) is missing.
      public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto build() {
        org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Like build(), but surfaces missing required fields as
      // InvalidProtocolBufferException — the contract of the parse* helpers.
      private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      // Copies field values and presence bits into a new message without
      // checking required fields.
      public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto result = new org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.callId_ = callId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.status_ = status_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.serverIpcVersionNum_ = serverIpcVersionNum_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      // Copies only the fields that are present in `other`; merging the
      // default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto.getDefaultInstance()) return this;
        if (other.hasCallId()) {
          setCallId(other.getCallId());
        }
        if (other.hasStatus()) {
          setStatus(other.getStatus());
        }
        if (other.hasServerIpcVersionNum()) {
          setServerIpcVersionNum(other.getServerIpcVersionNum());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      // Unlike the message's memoized version, the builder re-checks the
      // required fields on every call (builder state is mutable).
      public final boolean isInitialized() {
        if (!hasCallId()) {
          
          return false;
        }
        if (!hasStatus()) {
          
          return false;
        }
        return true;
      }
      
      // Wire-format parser. Reads tag/value pairs until tag 0 (end of
      // stream/group); unrecognized tags and unknown enum numbers are
      // preserved in the unknown-field set rather than dropped.
      // NOTE: protoc emits the `default:` label before the field cases —
      // unusual to read, but case order is irrelevant to Java switch
      // semantics, so behavior is unaffected.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              // field 1 (callId), wire type 0 (varint)
              bitField0_ |= 0x00000001;
              callId_ = input.readUInt32();
              break;
            }
            case 16: {
              // field 2 (status), wire type 0 (varint enum)
              int rawValue = input.readEnum();
              org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto value = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                status_ = value;
              }
              break;
            }
            case 24: {
              // field 3 (serverIpcVersionNum), wire type 0 (varint)
              bitField0_ |= 0x00000004;
              serverIpcVersionNum_ = input.readUInt32();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 callId = 1;
      private int callId_ ;
      public boolean hasCallId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getCallId() {
        return callId_;
      }
      public Builder setCallId(int value) {
        bitField0_ |= 0x00000001;
        callId_ = value;
        onChanged();
        return this;
      }
      public Builder clearCallId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        callId_ = 0;
        onChanged();
        return this;
      }
      
      // required .hadoop.common.RpcStatusProto status = 2;
      private org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto status_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto.SUCCESS;
      public boolean hasStatus() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto getStatus() {
        return status_;
      }
      // Protobuf fields are null-hostile, hence the explicit NPE.
      public Builder setStatus(org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        status_ = value;
        onChanged();
        return this;
      }
      public Builder clearStatus() {
        bitField0_ = (bitField0_ & ~0x00000002);
        status_ = org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto.SUCCESS;
        onChanged();
        return this;
      }
      
      // optional uint32 serverIpcVersionNum = 3;
      private int serverIpcVersionNum_ ;
      public boolean hasServerIpcVersionNum() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public int getServerIpcVersionNum() {
        return serverIpcVersionNum_;
      }
      public Builder setServerIpcVersionNum(int value) {
        bitField0_ |= 0x00000004;
        serverIpcVersionNum_ = value;
        onChanged();
        return this;
      }
      public Builder clearServerIpcVersionNum() {
        bitField0_ = (bitField0_ & ~0x00000004);
        serverIpcVersionNum_ = 0;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:hadoop.common.RpcResponseHeaderProto)
    }
    
    // Eagerly creates the shared default instance (noInit constructor, then
    // proto defaults via initFields()).
    static {
      defaultInstance = new RpcResponseHeaderProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:hadoop.common.RpcResponseHeaderProto)
  }
1283      
  // Reflection metadata for the two message types, assigned once by the
  // static initializer below when the embedded file descriptor is built.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcPayloadHeaderProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcPayloadHeaderProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_RpcResponseHeaderProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable;
1294      
  // File-level descriptor for RpcPayloadHeader.proto; built from the
  // serialized descriptor data in the static initializer below.
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Builds the FileDescriptor from the serialized FileDescriptorProto that
  // protoc embedded below as escaped bytes (do NOT edit the string data —
  // it must match the .proto exactly), then wires up the per-message
  // descriptors and reflection accessor tables via the assigner callback.
  static {
    java.lang.String[] descriptorData = {
      "\n\026RpcPayloadHeader.proto\022\rhadoop.common\"" +
      "\215\001\n\025RpcPayloadHeaderProto\022,\n\007rpcKind\030\001 \001" +
      "(\0162\033.hadoop.common.RpcKindProto\0226\n\005rpcOp" +
      "\030\002 \001(\0162\'.hadoop.common.RpcPayloadOperati" +
      "onProto\022\016\n\006callId\030\003 \002(\r\"t\n\026RpcResponseHe" +
      "aderProto\022\016\n\006callId\030\001 \002(\r\022-\n\006status\030\002 \002(" +
      "\0162\035.hadoop.common.RpcStatusProto\022\033\n\023serv" +
      "erIpcVersionNum\030\003 \001(\r*J\n\014RpcKindProto\022\017\n" +
      "\013RPC_BUILTIN\020\000\022\020\n\014RPC_WRITABLE\020\001\022\027\n\023RPC_" +
      "PROTOCOL_BUFFER\020\002*i\n\030RpcPayloadOperation",
      "Proto\022\025\n\021RPC_FINAL_PAYLOAD\020\000\022\034\n\030RPC_CONT" +
      "INUATION_PAYLOAD\020\001\022\030\n\024RPC_CLOSE_CONNECTI" +
      "ON\020\002*3\n\016RpcStatusProto\022\013\n\007SUCCESS\020\000\022\t\n\005E" +
      "RROR\020\001\022\t\n\005FATAL\020\002B;\n\036org.apache.hadoop.i" +
      "pc.protobufB\026RpcPayloadHeaderProtos\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          // Message index 0: RpcPayloadHeaderProto.
          internal_static_hadoop_common_RpcPayloadHeaderProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_RpcPayloadHeaderProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcPayloadHeaderProto_descriptor,
              new java.lang.String[] { "RpcKind", "RpcOp", "CallId", },
              org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto.class,
              org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto.Builder.class);
          // Message index 1: RpcResponseHeaderProto.
          internal_static_hadoop_common_RpcResponseHeaderProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_RpcResponseHeaderProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_RpcResponseHeaderProto_descriptor,
              new java.lang.String[] { "CallId", "Status", "ServerIpcVersionNum", },
              org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto.class,
              org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto.Builder.class);
          // No extensions declared in this file.
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
1348      
1349      // @@protoc_insertion_point(outer_class_scope)
1350    }