001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: test.proto
003
004package org.apache.hadoop.ipc.protobuf;
005
006public final class TestProtos {
007  private TestProtos() {}
008  public static void registerAllExtensions(
009      com.google.protobuf.ExtensionRegistry registry) {
010  }
011  public interface EmptyRequestProtoOrBuilder
012      extends com.google.protobuf.MessageOrBuilder {
013  }
014  /**
015   * Protobuf type {@code hadoop.common.EmptyRequestProto}
016   */
017  public static final class EmptyRequestProto extends
018      com.google.protobuf.GeneratedMessage
019      implements EmptyRequestProtoOrBuilder {
020    // Use EmptyRequestProto.newBuilder() to construct.
021    private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
022      super(builder);
023      this.unknownFields = builder.getUnknownFields();
024    }
025    private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
026
027    private static final EmptyRequestProto defaultInstance;
028    public static EmptyRequestProto getDefaultInstance() {
029      return defaultInstance;
030    }
031
032    public EmptyRequestProto getDefaultInstanceForType() {
033      return defaultInstance;
034    }
035
036    private final com.google.protobuf.UnknownFieldSet unknownFields;
037    @java.lang.Override
038    public final com.google.protobuf.UnknownFieldSet
039        getUnknownFields() {
040      return this.unknownFields;
041    }
042    private EmptyRequestProto(
043        com.google.protobuf.CodedInputStream input,
044        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
045        throws com.google.protobuf.InvalidProtocolBufferException {
046      initFields();
047      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
048          com.google.protobuf.UnknownFieldSet.newBuilder();
049      try {
050        boolean done = false;
051        while (!done) {
052          int tag = input.readTag();
053          switch (tag) {
054            case 0:
055              done = true;
056              break;
057            default: {
058              if (!parseUnknownField(input, unknownFields,
059                                     extensionRegistry, tag)) {
060                done = true;
061              }
062              break;
063            }
064          }
065        }
066      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
067        throw e.setUnfinishedMessage(this);
068      } catch (java.io.IOException e) {
069        throw new com.google.protobuf.InvalidProtocolBufferException(
070            e.getMessage()).setUnfinishedMessage(this);
071      } finally {
072        this.unknownFields = unknownFields.build();
073        makeExtensionsImmutable();
074      }
075    }
076    public static final com.google.protobuf.Descriptors.Descriptor
077        getDescriptor() {
078      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
079    }
080
081    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
082        internalGetFieldAccessorTable() {
083      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
084          .ensureFieldAccessorsInitialized(
085              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
086    }
087
088    public static com.google.protobuf.Parser<EmptyRequestProto> PARSER =
089        new com.google.protobuf.AbstractParser<EmptyRequestProto>() {
090      public EmptyRequestProto parsePartialFrom(
091          com.google.protobuf.CodedInputStream input,
092          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
093          throws com.google.protobuf.InvalidProtocolBufferException {
094        return new EmptyRequestProto(input, extensionRegistry);
095      }
096    };
097
098    @java.lang.Override
099    public com.google.protobuf.Parser<EmptyRequestProto> getParserForType() {
100      return PARSER;
101    }
102
103    private void initFields() {
104    }
105    private byte memoizedIsInitialized = -1;
106    public final boolean isInitialized() {
107      byte isInitialized = memoizedIsInitialized;
108      if (isInitialized != -1) return isInitialized == 1;
109
110      memoizedIsInitialized = 1;
111      return true;
112    }
113
114    public void writeTo(com.google.protobuf.CodedOutputStream output)
115                        throws java.io.IOException {
116      getSerializedSize();
117      getUnknownFields().writeTo(output);
118    }
119
120    private int memoizedSerializedSize = -1;
121    public int getSerializedSize() {
122      int size = memoizedSerializedSize;
123      if (size != -1) return size;
124
125      size = 0;
126      size += getUnknownFields().getSerializedSize();
127      memoizedSerializedSize = size;
128      return size;
129    }
130
131    private static final long serialVersionUID = 0L;
132    @java.lang.Override
133    protected java.lang.Object writeReplace()
134        throws java.io.ObjectStreamException {
135      return super.writeReplace();
136    }
137
138    @java.lang.Override
139    public boolean equals(final java.lang.Object obj) {
140      if (obj == this) {
141       return true;
142      }
143      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
144        return super.equals(obj);
145      }
146      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;
147
148      boolean result = true;
149      result = result &&
150          getUnknownFields().equals(other.getUnknownFields());
151      return result;
152    }
153
154    private int memoizedHashCode = 0;
155    @java.lang.Override
156    public int hashCode() {
157      if (memoizedHashCode != 0) {
158        return memoizedHashCode;
159      }
160      int hash = 41;
161      hash = (19 * hash) + getDescriptorForType().hashCode();
162      hash = (29 * hash) + getUnknownFields().hashCode();
163      memoizedHashCode = hash;
164      return hash;
165    }
166
167    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
168        com.google.protobuf.ByteString data)
169        throws com.google.protobuf.InvalidProtocolBufferException {
170      return PARSER.parseFrom(data);
171    }
172    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
173        com.google.protobuf.ByteString data,
174        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
175        throws com.google.protobuf.InvalidProtocolBufferException {
176      return PARSER.parseFrom(data, extensionRegistry);
177    }
178    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
179        throws com.google.protobuf.InvalidProtocolBufferException {
180      return PARSER.parseFrom(data);
181    }
182    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
183        byte[] data,
184        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
185        throws com.google.protobuf.InvalidProtocolBufferException {
186      return PARSER.parseFrom(data, extensionRegistry);
187    }
188    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
189        throws java.io.IOException {
190      return PARSER.parseFrom(input);
191    }
192    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
193        java.io.InputStream input,
194        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
195        throws java.io.IOException {
196      return PARSER.parseFrom(input, extensionRegistry);
197    }
198    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
199        throws java.io.IOException {
200      return PARSER.parseDelimitedFrom(input);
201    }
202    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
203        java.io.InputStream input,
204        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
205        throws java.io.IOException {
206      return PARSER.parseDelimitedFrom(input, extensionRegistry);
207    }
208    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
209        com.google.protobuf.CodedInputStream input)
210        throws java.io.IOException {
211      return PARSER.parseFrom(input);
212    }
213    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
214        com.google.protobuf.CodedInputStream input,
215        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
216        throws java.io.IOException {
217      return PARSER.parseFrom(input, extensionRegistry);
218    }
219
220    public static Builder newBuilder() { return Builder.create(); }
221    public Builder newBuilderForType() { return newBuilder(); }
222    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
223      return newBuilder().mergeFrom(prototype);
224    }
225    public Builder toBuilder() { return newBuilder(this); }
226
227    @java.lang.Override
228    protected Builder newBuilderForType(
229        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
230      Builder builder = new Builder(parent);
231      return builder;
232    }
233    /**
234     * Protobuf type {@code hadoop.common.EmptyRequestProto}
235     */
236    public static final class Builder extends
237        com.google.protobuf.GeneratedMessage.Builder<Builder>
238       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
239      public static final com.google.protobuf.Descriptors.Descriptor
240          getDescriptor() {
241        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
242      }
243
244      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
245          internalGetFieldAccessorTable() {
246        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
247            .ensureFieldAccessorsInitialized(
248                org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
249      }
250
251      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
252      private Builder() {
253        maybeForceBuilderInitialization();
254      }
255
256      private Builder(
257          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
258        super(parent);
259        maybeForceBuilderInitialization();
260      }
261      private void maybeForceBuilderInitialization() {
262        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
263        }
264      }
265      private static Builder create() {
266        return new Builder();
267      }
268
269      public Builder clear() {
270        super.clear();
271        return this;
272      }
273
274      public Builder clone() {
275        return create().mergeFrom(buildPartial());
276      }
277
278      public com.google.protobuf.Descriptors.Descriptor
279          getDescriptorForType() {
280        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
281      }
282
283      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
284        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
285      }
286
287      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
288        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
289        if (!result.isInitialized()) {
290          throw newUninitializedMessageException(result);
291        }
292        return result;
293      }
294
295      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
296        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
297        onBuilt();
298        return result;
299      }
300
301      public Builder mergeFrom(com.google.protobuf.Message other) {
302        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
303          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
304        } else {
305          super.mergeFrom(other);
306          return this;
307        }
308      }
309
310      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
311        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
312        this.mergeUnknownFields(other.getUnknownFields());
313        return this;
314      }
315
316      public final boolean isInitialized() {
317        return true;
318      }
319
320      public Builder mergeFrom(
321          com.google.protobuf.CodedInputStream input,
322          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
323          throws java.io.IOException {
324        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parsedMessage = null;
325        try {
326          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
327        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
328          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) e.getUnfinishedMessage();
329          throw e;
330        } finally {
331          if (parsedMessage != null) {
332            mergeFrom(parsedMessage);
333          }
334        }
335        return this;
336      }
337
338      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
339    }
340
341    static {
342      defaultInstance = new EmptyRequestProto(true);
343      defaultInstance.initFields();
344    }
345
346    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
347  }
348
349  public interface EmptyResponseProtoOrBuilder
350      extends com.google.protobuf.MessageOrBuilder {
351  }
352  /**
353   * Protobuf type {@code hadoop.common.EmptyResponseProto}
354   */
355  public static final class EmptyResponseProto extends
356      com.google.protobuf.GeneratedMessage
357      implements EmptyResponseProtoOrBuilder {
358    // Use EmptyResponseProto.newBuilder() to construct.
359    private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
360      super(builder);
361      this.unknownFields = builder.getUnknownFields();
362    }
363    private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
364
365    private static final EmptyResponseProto defaultInstance;
366    public static EmptyResponseProto getDefaultInstance() {
367      return defaultInstance;
368    }
369
370    public EmptyResponseProto getDefaultInstanceForType() {
371      return defaultInstance;
372    }
373
374    private final com.google.protobuf.UnknownFieldSet unknownFields;
375    @java.lang.Override
376    public final com.google.protobuf.UnknownFieldSet
377        getUnknownFields() {
378      return this.unknownFields;
379    }
380    private EmptyResponseProto(
381        com.google.protobuf.CodedInputStream input,
382        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
383        throws com.google.protobuf.InvalidProtocolBufferException {
384      initFields();
385      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
386          com.google.protobuf.UnknownFieldSet.newBuilder();
387      try {
388        boolean done = false;
389        while (!done) {
390          int tag = input.readTag();
391          switch (tag) {
392            case 0:
393              done = true;
394              break;
395            default: {
396              if (!parseUnknownField(input, unknownFields,
397                                     extensionRegistry, tag)) {
398                done = true;
399              }
400              break;
401            }
402          }
403        }
404      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
405        throw e.setUnfinishedMessage(this);
406      } catch (java.io.IOException e) {
407        throw new com.google.protobuf.InvalidProtocolBufferException(
408            e.getMessage()).setUnfinishedMessage(this);
409      } finally {
410        this.unknownFields = unknownFields.build();
411        makeExtensionsImmutable();
412      }
413    }
414    public static final com.google.protobuf.Descriptors.Descriptor
415        getDescriptor() {
416      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
417    }
418
419    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
420        internalGetFieldAccessorTable() {
421      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
422          .ensureFieldAccessorsInitialized(
423              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
424    }
425
426    public static com.google.protobuf.Parser<EmptyResponseProto> PARSER =
427        new com.google.protobuf.AbstractParser<EmptyResponseProto>() {
428      public EmptyResponseProto parsePartialFrom(
429          com.google.protobuf.CodedInputStream input,
430          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
431          throws com.google.protobuf.InvalidProtocolBufferException {
432        return new EmptyResponseProto(input, extensionRegistry);
433      }
434    };
435
436    @java.lang.Override
437    public com.google.protobuf.Parser<EmptyResponseProto> getParserForType() {
438      return PARSER;
439    }
440
441    private void initFields() {
442    }
443    private byte memoizedIsInitialized = -1;
444    public final boolean isInitialized() {
445      byte isInitialized = memoizedIsInitialized;
446      if (isInitialized != -1) return isInitialized == 1;
447
448      memoizedIsInitialized = 1;
449      return true;
450    }
451
452    public void writeTo(com.google.protobuf.CodedOutputStream output)
453                        throws java.io.IOException {
454      getSerializedSize();
455      getUnknownFields().writeTo(output);
456    }
457
458    private int memoizedSerializedSize = -1;
459    public int getSerializedSize() {
460      int size = memoizedSerializedSize;
461      if (size != -1) return size;
462
463      size = 0;
464      size += getUnknownFields().getSerializedSize();
465      memoizedSerializedSize = size;
466      return size;
467    }
468
469    private static final long serialVersionUID = 0L;
470    @java.lang.Override
471    protected java.lang.Object writeReplace()
472        throws java.io.ObjectStreamException {
473      return super.writeReplace();
474    }
475
476    @java.lang.Override
477    public boolean equals(final java.lang.Object obj) {
478      if (obj == this) {
479       return true;
480      }
481      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
482        return super.equals(obj);
483      }
484      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;
485
486      boolean result = true;
487      result = result &&
488          getUnknownFields().equals(other.getUnknownFields());
489      return result;
490    }
491
492    private int memoizedHashCode = 0;
493    @java.lang.Override
494    public int hashCode() {
495      if (memoizedHashCode != 0) {
496        return memoizedHashCode;
497      }
498      int hash = 41;
499      hash = (19 * hash) + getDescriptorForType().hashCode();
500      hash = (29 * hash) + getUnknownFields().hashCode();
501      memoizedHashCode = hash;
502      return hash;
503    }
504
505    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
506        com.google.protobuf.ByteString data)
507        throws com.google.protobuf.InvalidProtocolBufferException {
508      return PARSER.parseFrom(data);
509    }
510    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
511        com.google.protobuf.ByteString data,
512        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
513        throws com.google.protobuf.InvalidProtocolBufferException {
514      return PARSER.parseFrom(data, extensionRegistry);
515    }
516    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
517        throws com.google.protobuf.InvalidProtocolBufferException {
518      return PARSER.parseFrom(data);
519    }
520    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
521        byte[] data,
522        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
523        throws com.google.protobuf.InvalidProtocolBufferException {
524      return PARSER.parseFrom(data, extensionRegistry);
525    }
526    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
527        throws java.io.IOException {
528      return PARSER.parseFrom(input);
529    }
530    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
531        java.io.InputStream input,
532        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
533        throws java.io.IOException {
534      return PARSER.parseFrom(input, extensionRegistry);
535    }
536    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
537        throws java.io.IOException {
538      return PARSER.parseDelimitedFrom(input);
539    }
540    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
541        java.io.InputStream input,
542        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
543        throws java.io.IOException {
544      return PARSER.parseDelimitedFrom(input, extensionRegistry);
545    }
546    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
547        com.google.protobuf.CodedInputStream input)
548        throws java.io.IOException {
549      return PARSER.parseFrom(input);
550    }
551    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
552        com.google.protobuf.CodedInputStream input,
553        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
554        throws java.io.IOException {
555      return PARSER.parseFrom(input, extensionRegistry);
556    }
557
558    public static Builder newBuilder() { return Builder.create(); }
559    public Builder newBuilderForType() { return newBuilder(); }
560    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
561      return newBuilder().mergeFrom(prototype);
562    }
563    public Builder toBuilder() { return newBuilder(this); }
564
565    @java.lang.Override
566    protected Builder newBuilderForType(
567        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
568      Builder builder = new Builder(parent);
569      return builder;
570    }
571    /**
572     * Protobuf type {@code hadoop.common.EmptyResponseProto}
573     */
574    public static final class Builder extends
575        com.google.protobuf.GeneratedMessage.Builder<Builder>
576       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
577      public static final com.google.protobuf.Descriptors.Descriptor
578          getDescriptor() {
579        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
580      }
581
582      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
583          internalGetFieldAccessorTable() {
584        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
585            .ensureFieldAccessorsInitialized(
586                org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
587      }
588
589      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
590      private Builder() {
591        maybeForceBuilderInitialization();
592      }
593
594      private Builder(
595          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
596        super(parent);
597        maybeForceBuilderInitialization();
598      }
599      private void maybeForceBuilderInitialization() {
600        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
601        }
602      }
603      private static Builder create() {
604        return new Builder();
605      }
606
607      public Builder clear() {
608        super.clear();
609        return this;
610      }
611
612      public Builder clone() {
613        return create().mergeFrom(buildPartial());
614      }
615
616      public com.google.protobuf.Descriptors.Descriptor
617          getDescriptorForType() {
618        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
619      }
620
621      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
622        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
623      }
624
625      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
626        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
627        if (!result.isInitialized()) {
628          throw newUninitializedMessageException(result);
629        }
630        return result;
631      }
632
633      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
634        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
635        onBuilt();
636        return result;
637      }
638
639      public Builder mergeFrom(com.google.protobuf.Message other) {
640        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
641          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
642        } else {
643          super.mergeFrom(other);
644          return this;
645        }
646      }
647
648      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
649        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
650        this.mergeUnknownFields(other.getUnknownFields());
651        return this;
652      }
653
654      public final boolean isInitialized() {
655        return true;
656      }
657
658      public Builder mergeFrom(
659          com.google.protobuf.CodedInputStream input,
660          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
661          throws java.io.IOException {
662        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parsedMessage = null;
663        try {
664          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
665        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
666          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) e.getUnfinishedMessage();
667          throw e;
668        } finally {
669          if (parsedMessage != null) {
670            mergeFrom(parsedMessage);
671          }
672        }
673        return this;
674      }
675
676      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
677    }
678
679    static {
680      defaultInstance = new EmptyResponseProto(true);
681      defaultInstance.initFields();
682    }
683
684    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
685  }
686
687  public interface EchoRequestProtoOrBuilder
688      extends com.google.protobuf.MessageOrBuilder {
689
690    // required string message = 1;
691    /**
692     * <code>required string message = 1;</code>
693     */
694    boolean hasMessage();
695    /**
696     * <code>required string message = 1;</code>
697     */
698    java.lang.String getMessage();
699    /**
700     * <code>required string message = 1;</code>
701     */
702    com.google.protobuf.ByteString
703        getMessageBytes();
704  }
705  /**
706   * Protobuf type {@code hadoop.common.EchoRequestProto}
707   */
708  public static final class EchoRequestProto extends
709      com.google.protobuf.GeneratedMessage
710      implements EchoRequestProtoOrBuilder {
711    // Use EchoRequestProto.newBuilder() to construct.
712    private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
713      super(builder);
714      this.unknownFields = builder.getUnknownFields();
715    }
716    private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
717
718    private static final EchoRequestProto defaultInstance;
719    public static EchoRequestProto getDefaultInstance() {
720      return defaultInstance;
721    }
722
723    public EchoRequestProto getDefaultInstanceForType() {
724      return defaultInstance;
725    }
726
727    private final com.google.protobuf.UnknownFieldSet unknownFields;
728    @java.lang.Override
729    public final com.google.protobuf.UnknownFieldSet
730        getUnknownFields() {
731      return this.unknownFields;
732    }
733    private EchoRequestProto(
734        com.google.protobuf.CodedInputStream input,
735        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
736        throws com.google.protobuf.InvalidProtocolBufferException {
737      initFields();
738      int mutable_bitField0_ = 0;
739      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
740          com.google.protobuf.UnknownFieldSet.newBuilder();
741      try {
742        boolean done = false;
743        while (!done) {
744          int tag = input.readTag();
745          switch (tag) {
746            case 0:
747              done = true;
748              break;
749            default: {
750              if (!parseUnknownField(input, unknownFields,
751                                     extensionRegistry, tag)) {
752                done = true;
753              }
754              break;
755            }
756            case 10: {
757              bitField0_ |= 0x00000001;
758              message_ = input.readBytes();
759              break;
760            }
761          }
762        }
763      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
764        throw e.setUnfinishedMessage(this);
765      } catch (java.io.IOException e) {
766        throw new com.google.protobuf.InvalidProtocolBufferException(
767            e.getMessage()).setUnfinishedMessage(this);
768      } finally {
769        this.unknownFields = unknownFields.build();
770        makeExtensionsImmutable();
771      }
772    }
773    public static final com.google.protobuf.Descriptors.Descriptor
774        getDescriptor() {
775      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
776    }
777
778    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
779        internalGetFieldAccessorTable() {
780      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
781          .ensureFieldAccessorsInitialized(
782              org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
783    }
784
785    public static com.google.protobuf.Parser<EchoRequestProto> PARSER =
786        new com.google.protobuf.AbstractParser<EchoRequestProto>() {
787      public EchoRequestProto parsePartialFrom(
788          com.google.protobuf.CodedInputStream input,
789          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
790          throws com.google.protobuf.InvalidProtocolBufferException {
791        return new EchoRequestProto(input, extensionRegistry);
792      }
793    };
794
795    @java.lang.Override
796    public com.google.protobuf.Parser<EchoRequestProto> getParserForType() {
797      return PARSER;
798    }
799
800    private int bitField0_;
801    // required string message = 1;
802    public static final int MESSAGE_FIELD_NUMBER = 1;
803    private java.lang.Object message_;
804    /**
805     * <code>required string message = 1;</code>
806     */
807    public boolean hasMessage() {
808      return ((bitField0_ & 0x00000001) == 0x00000001);
809    }
810    /**
811     * <code>required string message = 1;</code>
812     */
813    public java.lang.String getMessage() {
814      java.lang.Object ref = message_;
815      if (ref instanceof java.lang.String) {
816        return (java.lang.String) ref;
817      } else {
818        com.google.protobuf.ByteString bs = 
819            (com.google.protobuf.ByteString) ref;
820        java.lang.String s = bs.toStringUtf8();
821        if (bs.isValidUtf8()) {
822          message_ = s;
823        }
824        return s;
825      }
826    }
827    /**
828     * <code>required string message = 1;</code>
829     */
830    public com.google.protobuf.ByteString
831        getMessageBytes() {
832      java.lang.Object ref = message_;
833      if (ref instanceof java.lang.String) {
834        com.google.protobuf.ByteString b = 
835            com.google.protobuf.ByteString.copyFromUtf8(
836                (java.lang.String) ref);
837        message_ = b;
838        return b;
839      } else {
840        return (com.google.protobuf.ByteString) ref;
841      }
842    }
843
844    private void initFields() {
845      message_ = "";
846    }
847    private byte memoizedIsInitialized = -1;
848    public final boolean isInitialized() {
849      byte isInitialized = memoizedIsInitialized;
850      if (isInitialized != -1) return isInitialized == 1;
851
852      if (!hasMessage()) {
853        memoizedIsInitialized = 0;
854        return false;
855      }
856      memoizedIsInitialized = 1;
857      return true;
858    }
859
860    public void writeTo(com.google.protobuf.CodedOutputStream output)
861                        throws java.io.IOException {
862      getSerializedSize();
863      if (((bitField0_ & 0x00000001) == 0x00000001)) {
864        output.writeBytes(1, getMessageBytes());
865      }
866      getUnknownFields().writeTo(output);
867    }
868
869    private int memoizedSerializedSize = -1;
870    public int getSerializedSize() {
871      int size = memoizedSerializedSize;
872      if (size != -1) return size;
873
874      size = 0;
875      if (((bitField0_ & 0x00000001) == 0x00000001)) {
876        size += com.google.protobuf.CodedOutputStream
877          .computeBytesSize(1, getMessageBytes());
878      }
879      size += getUnknownFields().getSerializedSize();
880      memoizedSerializedSize = size;
881      return size;
882    }
883
884    private static final long serialVersionUID = 0L;
885    @java.lang.Override
886    protected java.lang.Object writeReplace()
887        throws java.io.ObjectStreamException {
888      return super.writeReplace();
889    }
890
891    @java.lang.Override
892    public boolean equals(final java.lang.Object obj) {
893      if (obj == this) {
894       return true;
895      }
896      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
897        return super.equals(obj);
898      }
899      org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;
900
901      boolean result = true;
902      result = result && (hasMessage() == other.hasMessage());
903      if (hasMessage()) {
904        result = result && getMessage()
905            .equals(other.getMessage());
906      }
907      result = result &&
908          getUnknownFields().equals(other.getUnknownFields());
909      return result;
910    }
911
912    private int memoizedHashCode = 0;
913    @java.lang.Override
914    public int hashCode() {
915      if (memoizedHashCode != 0) {
916        return memoizedHashCode;
917      }
918      int hash = 41;
919      hash = (19 * hash) + getDescriptorForType().hashCode();
920      if (hasMessage()) {
921        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
922        hash = (53 * hash) + getMessage().hashCode();
923      }
924      hash = (29 * hash) + getUnknownFields().hashCode();
925      memoizedHashCode = hash;
926      return hash;
927    }
928
929    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
930        com.google.protobuf.ByteString data)
931        throws com.google.protobuf.InvalidProtocolBufferException {
932      return PARSER.parseFrom(data);
933    }
934    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
935        com.google.protobuf.ByteString data,
936        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
937        throws com.google.protobuf.InvalidProtocolBufferException {
938      return PARSER.parseFrom(data, extensionRegistry);
939    }
940    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
941        throws com.google.protobuf.InvalidProtocolBufferException {
942      return PARSER.parseFrom(data);
943    }
944    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
945        byte[] data,
946        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
947        throws com.google.protobuf.InvalidProtocolBufferException {
948      return PARSER.parseFrom(data, extensionRegistry);
949    }
950    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input)
951        throws java.io.IOException {
952      return PARSER.parseFrom(input);
953    }
954    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
955        java.io.InputStream input,
956        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
957        throws java.io.IOException {
958      return PARSER.parseFrom(input, extensionRegistry);
959    }
960    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
961        throws java.io.IOException {
962      return PARSER.parseDelimitedFrom(input);
963    }
964    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
965        java.io.InputStream input,
966        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
967        throws java.io.IOException {
968      return PARSER.parseDelimitedFrom(input, extensionRegistry);
969    }
970    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
971        com.google.protobuf.CodedInputStream input)
972        throws java.io.IOException {
973      return PARSER.parseFrom(input);
974    }
975    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
976        com.google.protobuf.CodedInputStream input,
977        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
978        throws java.io.IOException {
979      return PARSER.parseFrom(input, extensionRegistry);
980    }
981
982    public static Builder newBuilder() { return Builder.create(); }
983    public Builder newBuilderForType() { return newBuilder(); }
984    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
985      return newBuilder().mergeFrom(prototype);
986    }
987    public Builder toBuilder() { return newBuilder(this); }
988
989    @java.lang.Override
990    protected Builder newBuilderForType(
991        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
992      Builder builder = new Builder(parent);
993      return builder;
994    }
995    /**
996     * Protobuf type {@code hadoop.common.EchoRequestProto}
997     */
998    public static final class Builder extends
999        com.google.protobuf.GeneratedMessage.Builder<Builder>
1000       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
1001      public static final com.google.protobuf.Descriptors.Descriptor
1002          getDescriptor() {
1003        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
1004      }
1005
1006      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1007          internalGetFieldAccessorTable() {
1008        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
1009            .ensureFieldAccessorsInitialized(
1010                org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
1011      }
1012
1013      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
1014      private Builder() {
1015        maybeForceBuilderInitialization();
1016      }
1017
1018      private Builder(
1019          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1020        super(parent);
1021        maybeForceBuilderInitialization();
1022      }
1023      private void maybeForceBuilderInitialization() {
1024        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1025        }
1026      }
1027      private static Builder create() {
1028        return new Builder();
1029      }
1030
1031      public Builder clear() {
1032        super.clear();
1033        message_ = "";
1034        bitField0_ = (bitField0_ & ~0x00000001);
1035        return this;
1036      }
1037
1038      public Builder clone() {
1039        return create().mergeFrom(buildPartial());
1040      }
1041
1042      public com.google.protobuf.Descriptors.Descriptor
1043          getDescriptorForType() {
1044        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
1045      }
1046
1047      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
1048        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
1049      }
1050
1051      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
1052        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
1053        if (!result.isInitialized()) {
1054          throw newUninitializedMessageException(result);
1055        }
1056        return result;
1057      }
1058
1059      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
1060        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
1061        int from_bitField0_ = bitField0_;
1062        int to_bitField0_ = 0;
1063        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1064          to_bitField0_ |= 0x00000001;
1065        }
1066        result.message_ = message_;
1067        result.bitField0_ = to_bitField0_;
1068        onBuilt();
1069        return result;
1070      }
1071
1072      public Builder mergeFrom(com.google.protobuf.Message other) {
1073        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
1074          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
1075        } else {
1076          super.mergeFrom(other);
1077          return this;
1078        }
1079      }
1080
1081      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
1082        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
1083        if (other.hasMessage()) {
1084          bitField0_ |= 0x00000001;
1085          message_ = other.message_;
1086          onChanged();
1087        }
1088        this.mergeUnknownFields(other.getUnknownFields());
1089        return this;
1090      }
1091
1092      public final boolean isInitialized() {
1093        if (!hasMessage()) {
1094          
1095          return false;
1096        }
1097        return true;
1098      }
1099
1100      public Builder mergeFrom(
1101          com.google.protobuf.CodedInputStream input,
1102          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1103          throws java.io.IOException {
1104        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parsedMessage = null;
1105        try {
1106          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1107        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1108          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) e.getUnfinishedMessage();
1109          throw e;
1110        } finally {
1111          if (parsedMessage != null) {
1112            mergeFrom(parsedMessage);
1113          }
1114        }
1115        return this;
1116      }
1117      private int bitField0_;
1118
1119      // required string message = 1;
1120      private java.lang.Object message_ = "";
1121      /**
1122       * <code>required string message = 1;</code>
1123       */
1124      public boolean hasMessage() {
1125        return ((bitField0_ & 0x00000001) == 0x00000001);
1126      }
1127      /**
1128       * <code>required string message = 1;</code>
1129       */
1130      public java.lang.String getMessage() {
1131        java.lang.Object ref = message_;
1132        if (!(ref instanceof java.lang.String)) {
1133          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1134              .toStringUtf8();
1135          message_ = s;
1136          return s;
1137        } else {
1138          return (java.lang.String) ref;
1139        }
1140      }
1141      /**
1142       * <code>required string message = 1;</code>
1143       */
1144      public com.google.protobuf.ByteString
1145          getMessageBytes() {
1146        java.lang.Object ref = message_;
1147        if (ref instanceof String) {
1148          com.google.protobuf.ByteString b = 
1149              com.google.protobuf.ByteString.copyFromUtf8(
1150                  (java.lang.String) ref);
1151          message_ = b;
1152          return b;
1153        } else {
1154          return (com.google.protobuf.ByteString) ref;
1155        }
1156      }
1157      /**
1158       * <code>required string message = 1;</code>
1159       */
1160      public Builder setMessage(
1161          java.lang.String value) {
1162        if (value == null) {
1163    throw new NullPointerException();
1164  }
1165  bitField0_ |= 0x00000001;
1166        message_ = value;
1167        onChanged();
1168        return this;
1169      }
1170      /**
1171       * <code>required string message = 1;</code>
1172       */
1173      public Builder clearMessage() {
1174        bitField0_ = (bitField0_ & ~0x00000001);
1175        message_ = getDefaultInstance().getMessage();
1176        onChanged();
1177        return this;
1178      }
1179      /**
1180       * <code>required string message = 1;</code>
1181       */
1182      public Builder setMessageBytes(
1183          com.google.protobuf.ByteString value) {
1184        if (value == null) {
1185    throw new NullPointerException();
1186  }
1187  bitField0_ |= 0x00000001;
1188        message_ = value;
1189        onChanged();
1190        return this;
1191      }
1192
1193      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto)
1194    }
1195
1196    static {
1197      defaultInstance = new EchoRequestProto(true);
1198      defaultInstance.initFields();
1199    }
1200
1201    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto)
1202  }
1203
1204  public interface EchoResponseProtoOrBuilder
1205      extends com.google.protobuf.MessageOrBuilder {
1206
1207    // required string message = 1;
1208    /**
1209     * <code>required string message = 1;</code>
1210     */
1211    boolean hasMessage();
1212    /**
1213     * <code>required string message = 1;</code>
1214     */
1215    java.lang.String getMessage();
1216    /**
1217     * <code>required string message = 1;</code>
1218     */
1219    com.google.protobuf.ByteString
1220        getMessageBytes();
1221  }
1222  /**
1223   * Protobuf type {@code hadoop.common.EchoResponseProto}
1224   */
1225  public static final class EchoResponseProto extends
1226      com.google.protobuf.GeneratedMessage
1227      implements EchoResponseProtoOrBuilder {
1228    // Use EchoResponseProto.newBuilder() to construct.
1229    private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1230      super(builder);
1231      this.unknownFields = builder.getUnknownFields();
1232    }
1233    private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1234
1235    private static final EchoResponseProto defaultInstance;
1236    public static EchoResponseProto getDefaultInstance() {
1237      return defaultInstance;
1238    }
1239
1240    public EchoResponseProto getDefaultInstanceForType() {
1241      return defaultInstance;
1242    }
1243
1244    private final com.google.protobuf.UnknownFieldSet unknownFields;
1245    @java.lang.Override
1246    public final com.google.protobuf.UnknownFieldSet
1247        getUnknownFields() {
1248      return this.unknownFields;
1249    }
1250    private EchoResponseProto(
1251        com.google.protobuf.CodedInputStream input,
1252        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1253        throws com.google.protobuf.InvalidProtocolBufferException {
1254      initFields();
1255      int mutable_bitField0_ = 0;
1256      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1257          com.google.protobuf.UnknownFieldSet.newBuilder();
1258      try {
1259        boolean done = false;
1260        while (!done) {
1261          int tag = input.readTag();
1262          switch (tag) {
1263            case 0:
1264              done = true;
1265              break;
1266            default: {
1267              if (!parseUnknownField(input, unknownFields,
1268                                     extensionRegistry, tag)) {
1269                done = true;
1270              }
1271              break;
1272            }
1273            case 10: {
1274              bitField0_ |= 0x00000001;
1275              message_ = input.readBytes();
1276              break;
1277            }
1278          }
1279        }
1280      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1281        throw e.setUnfinishedMessage(this);
1282      } catch (java.io.IOException e) {
1283        throw new com.google.protobuf.InvalidProtocolBufferException(
1284            e.getMessage()).setUnfinishedMessage(this);
1285      } finally {
1286        this.unknownFields = unknownFields.build();
1287        makeExtensionsImmutable();
1288      }
1289    }
1290    public static final com.google.protobuf.Descriptors.Descriptor
1291        getDescriptor() {
1292      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1293    }
1294
1295    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1296        internalGetFieldAccessorTable() {
1297      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
1298          .ensureFieldAccessorsInitialized(
1299              org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
1300    }
1301
1302    public static com.google.protobuf.Parser<EchoResponseProto> PARSER =
1303        new com.google.protobuf.AbstractParser<EchoResponseProto>() {
1304      public EchoResponseProto parsePartialFrom(
1305          com.google.protobuf.CodedInputStream input,
1306          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1307          throws com.google.protobuf.InvalidProtocolBufferException {
1308        return new EchoResponseProto(input, extensionRegistry);
1309      }
1310    };
1311
1312    @java.lang.Override
1313    public com.google.protobuf.Parser<EchoResponseProto> getParserForType() {
1314      return PARSER;
1315    }
1316
1317    private int bitField0_;
1318    // required string message = 1;
1319    public static final int MESSAGE_FIELD_NUMBER = 1;
1320    private java.lang.Object message_;
1321    /**
1322     * <code>required string message = 1;</code>
1323     */
1324    public boolean hasMessage() {
1325      return ((bitField0_ & 0x00000001) == 0x00000001);
1326    }
1327    /**
1328     * <code>required string message = 1;</code>
1329     */
1330    public java.lang.String getMessage() {
1331      java.lang.Object ref = message_;
1332      if (ref instanceof java.lang.String) {
1333        return (java.lang.String) ref;
1334      } else {
1335        com.google.protobuf.ByteString bs = 
1336            (com.google.protobuf.ByteString) ref;
1337        java.lang.String s = bs.toStringUtf8();
1338        if (bs.isValidUtf8()) {
1339          message_ = s;
1340        }
1341        return s;
1342      }
1343    }
1344    /**
1345     * <code>required string message = 1;</code>
1346     */
1347    public com.google.protobuf.ByteString
1348        getMessageBytes() {
1349      java.lang.Object ref = message_;
1350      if (ref instanceof java.lang.String) {
1351        com.google.protobuf.ByteString b = 
1352            com.google.protobuf.ByteString.copyFromUtf8(
1353                (java.lang.String) ref);
1354        message_ = b;
1355        return b;
1356      } else {
1357        return (com.google.protobuf.ByteString) ref;
1358      }
1359    }
1360
1361    private void initFields() {
1362      message_ = "";
1363    }
1364    private byte memoizedIsInitialized = -1;
1365    public final boolean isInitialized() {
1366      byte isInitialized = memoizedIsInitialized;
1367      if (isInitialized != -1) return isInitialized == 1;
1368
1369      if (!hasMessage()) {
1370        memoizedIsInitialized = 0;
1371        return false;
1372      }
1373      memoizedIsInitialized = 1;
1374      return true;
1375    }
1376
1377    public void writeTo(com.google.protobuf.CodedOutputStream output)
1378                        throws java.io.IOException {
1379      getSerializedSize();
1380      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1381        output.writeBytes(1, getMessageBytes());
1382      }
1383      getUnknownFields().writeTo(output);
1384    }
1385
1386    private int memoizedSerializedSize = -1;
1387    public int getSerializedSize() {
1388      int size = memoizedSerializedSize;
1389      if (size != -1) return size;
1390
1391      size = 0;
1392      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1393        size += com.google.protobuf.CodedOutputStream
1394          .computeBytesSize(1, getMessageBytes());
1395      }
1396      size += getUnknownFields().getSerializedSize();
1397      memoizedSerializedSize = size;
1398      return size;
1399    }
1400
1401    private static final long serialVersionUID = 0L;
1402    @java.lang.Override
1403    protected java.lang.Object writeReplace()
1404        throws java.io.ObjectStreamException {
1405      return super.writeReplace();
1406    }
1407
1408    @java.lang.Override
1409    public boolean equals(final java.lang.Object obj) {
1410      if (obj == this) {
1411       return true;
1412      }
1413      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
1414        return super.equals(obj);
1415      }
1416      org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;
1417
1418      boolean result = true;
1419      result = result && (hasMessage() == other.hasMessage());
1420      if (hasMessage()) {
1421        result = result && getMessage()
1422            .equals(other.getMessage());
1423      }
1424      result = result &&
1425          getUnknownFields().equals(other.getUnknownFields());
1426      return result;
1427    }
1428
1429    private int memoizedHashCode = 0;
1430    @java.lang.Override
1431    public int hashCode() {
1432      if (memoizedHashCode != 0) {
1433        return memoizedHashCode;
1434      }
1435      int hash = 41;
1436      hash = (19 * hash) + getDescriptorForType().hashCode();
1437      if (hasMessage()) {
1438        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
1439        hash = (53 * hash) + getMessage().hashCode();
1440      }
1441      hash = (29 * hash) + getUnknownFields().hashCode();
1442      memoizedHashCode = hash;
1443      return hash;
1444    }
1445
1446    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1447        com.google.protobuf.ByteString data)
1448        throws com.google.protobuf.InvalidProtocolBufferException {
1449      return PARSER.parseFrom(data);
1450    }
1451    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1452        com.google.protobuf.ByteString data,
1453        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1454        throws com.google.protobuf.InvalidProtocolBufferException {
1455      return PARSER.parseFrom(data, extensionRegistry);
1456    }
1457    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
1458        throws com.google.protobuf.InvalidProtocolBufferException {
1459      return PARSER.parseFrom(data);
1460    }
1461    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1462        byte[] data,
1463        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1464        throws com.google.protobuf.InvalidProtocolBufferException {
1465      return PARSER.parseFrom(data, extensionRegistry);
1466    }
1467    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
1468        throws java.io.IOException {
1469      return PARSER.parseFrom(input);
1470    }
1471    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1472        java.io.InputStream input,
1473        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1474        throws java.io.IOException {
1475      return PARSER.parseFrom(input, extensionRegistry);
1476    }
1477    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
1478        throws java.io.IOException {
1479      return PARSER.parseDelimitedFrom(input);
1480    }
1481    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
1482        java.io.InputStream input,
1483        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1484        throws java.io.IOException {
1485      return PARSER.parseDelimitedFrom(input, extensionRegistry);
1486    }
1487    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1488        com.google.protobuf.CodedInputStream input)
1489        throws java.io.IOException {
1490      return PARSER.parseFrom(input);
1491    }
1492    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1493        com.google.protobuf.CodedInputStream input,
1494        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1495        throws java.io.IOException {
1496      return PARSER.parseFrom(input, extensionRegistry);
1497    }
1498
1499    public static Builder newBuilder() { return Builder.create(); }
1500    public Builder newBuilderForType() { return newBuilder(); }
1501    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
1502      return newBuilder().mergeFrom(prototype);
1503    }
1504    public Builder toBuilder() { return newBuilder(this); }
1505
1506    @java.lang.Override
1507    protected Builder newBuilderForType(
1508        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1509      Builder builder = new Builder(parent);
1510      return builder;
1511    }
1512    /**
1513     * Protobuf type {@code hadoop.common.EchoResponseProto}
1514     */
1515    public static final class Builder extends
1516        com.google.protobuf.GeneratedMessage.Builder<Builder>
1517       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
1518      public static final com.google.protobuf.Descriptors.Descriptor
1519          getDescriptor() {
1520        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1521      }
1522
1523      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1524          internalGetFieldAccessorTable() {
1525        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
1526            .ensureFieldAccessorsInitialized(
1527                org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
1528      }
1529
1530      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
1531      private Builder() {
1532        maybeForceBuilderInitialization();
1533      }
1534
1535      private Builder(
1536          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1537        super(parent);
1538        maybeForceBuilderInitialization();
1539      }
1540      private void maybeForceBuilderInitialization() {
1541        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1542        }
1543      }
1544      private static Builder create() {
1545        return new Builder();
1546      }
1547
1548      public Builder clear() {
1549        super.clear();
1550        message_ = "";
1551        bitField0_ = (bitField0_ & ~0x00000001);
1552        return this;
1553      }
1554
1555      public Builder clone() {
1556        return create().mergeFrom(buildPartial());
1557      }
1558
1559      public com.google.protobuf.Descriptors.Descriptor
1560          getDescriptorForType() {
1561        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1562      }
1563
1564      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
1565        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
1566      }
1567
1568      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
1569        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
1570        if (!result.isInitialized()) {
1571          throw newUninitializedMessageException(result);
1572        }
1573        return result;
1574      }
1575
1576      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
1577        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
1578        int from_bitField0_ = bitField0_;
1579        int to_bitField0_ = 0;
1580        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1581          to_bitField0_ |= 0x00000001;
1582        }
1583        result.message_ = message_;
1584        result.bitField0_ = to_bitField0_;
1585        onBuilt();
1586        return result;
1587      }
1588
1589      public Builder mergeFrom(com.google.protobuf.Message other) {
1590        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
1591          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
1592        } else {
1593          super.mergeFrom(other);
1594          return this;
1595        }
1596      }
1597
1598      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
1599        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
1600        if (other.hasMessage()) {
1601          bitField0_ |= 0x00000001;
1602          message_ = other.message_;
1603          onChanged();
1604        }
1605        this.mergeUnknownFields(other.getUnknownFields());
1606        return this;
1607      }
1608
1609      public final boolean isInitialized() {
1610        if (!hasMessage()) {
1611          
1612          return false;
1613        }
1614        return true;
1615      }
1616
1617      public Builder mergeFrom(
1618          com.google.protobuf.CodedInputStream input,
1619          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1620          throws java.io.IOException {
1621        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parsedMessage = null;
1622        try {
1623          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1624        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1625          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) e.getUnfinishedMessage();
1626          throw e;
1627        } finally {
1628          if (parsedMessage != null) {
1629            mergeFrom(parsedMessage);
1630          }
1631        }
1632        return this;
1633      }
1634      private int bitField0_;
1635
1636      // required string message = 1;
1637      private java.lang.Object message_ = "";
1638      /**
1639       * <code>required string message = 1;</code>
1640       */
1641      public boolean hasMessage() {
1642        return ((bitField0_ & 0x00000001) == 0x00000001);
1643      }
1644      /**
1645       * <code>required string message = 1;</code>
1646       */
1647      public java.lang.String getMessage() {
1648        java.lang.Object ref = message_;
1649        if (!(ref instanceof java.lang.String)) {
1650          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1651              .toStringUtf8();
1652          message_ = s;
1653          return s;
1654        } else {
1655          return (java.lang.String) ref;
1656        }
1657      }
1658      /**
1659       * <code>required string message = 1;</code>
1660       */
1661      public com.google.protobuf.ByteString
1662          getMessageBytes() {
1663        java.lang.Object ref = message_;
1664        if (ref instanceof String) {
1665          com.google.protobuf.ByteString b = 
1666              com.google.protobuf.ByteString.copyFromUtf8(
1667                  (java.lang.String) ref);
1668          message_ = b;
1669          return b;
1670        } else {
1671          return (com.google.protobuf.ByteString) ref;
1672        }
1673      }
1674      /**
1675       * <code>required string message = 1;</code>
1676       */
1677      public Builder setMessage(
1678          java.lang.String value) {
1679        if (value == null) {
1680    throw new NullPointerException();
1681  }
1682  bitField0_ |= 0x00000001;
1683        message_ = value;
1684        onChanged();
1685        return this;
1686      }
1687      /**
1688       * <code>required string message = 1;</code>
1689       */
1690      public Builder clearMessage() {
1691        bitField0_ = (bitField0_ & ~0x00000001);
1692        message_ = getDefaultInstance().getMessage();
1693        onChanged();
1694        return this;
1695      }
1696      /**
1697       * <code>required string message = 1;</code>
1698       */
1699      public Builder setMessageBytes(
1700          com.google.protobuf.ByteString value) {
1701        if (value == null) {
1702    throw new NullPointerException();
1703  }
1704  bitField0_ |= 0x00000001;
1705        message_ = value;
1706        onChanged();
1707        return this;
1708      }
1709
1710      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
1711    }
1712
1713    static {
1714      defaultInstance = new EchoResponseProto(true);
1715      defaultInstance.initFields();
1716    }
1717
1718    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
1719  }
1720
1721  public interface OptRequestProtoOrBuilder
1722      extends com.google.protobuf.MessageOrBuilder {
1723
1724    // optional string message = 1;
1725    /**
1726     * <code>optional string message = 1;</code>
1727     */
1728    boolean hasMessage();
1729    /**
1730     * <code>optional string message = 1;</code>
1731     */
1732    java.lang.String getMessage();
1733    /**
1734     * <code>optional string message = 1;</code>
1735     */
1736    com.google.protobuf.ByteString
1737        getMessageBytes();
1738  }
1739  /**
1740   * Protobuf type {@code hadoop.common.OptRequestProto}
1741   */
1742  public static final class OptRequestProto extends
1743      com.google.protobuf.GeneratedMessage
1744      implements OptRequestProtoOrBuilder {
1745    // Use OptRequestProto.newBuilder() to construct.
1746    private OptRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1747      super(builder);
1748      this.unknownFields = builder.getUnknownFields();
1749    }
1750    private OptRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1751
1752    private static final OptRequestProto defaultInstance;
1753    public static OptRequestProto getDefaultInstance() {
1754      return defaultInstance;
1755    }
1756
1757    public OptRequestProto getDefaultInstanceForType() {
1758      return defaultInstance;
1759    }
1760
1761    private final com.google.protobuf.UnknownFieldSet unknownFields;
1762    @java.lang.Override
1763    public final com.google.protobuf.UnknownFieldSet
1764        getUnknownFields() {
1765      return this.unknownFields;
1766    }
1767    private OptRequestProto(
1768        com.google.protobuf.CodedInputStream input,
1769        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1770        throws com.google.protobuf.InvalidProtocolBufferException {
1771      initFields();
1772      int mutable_bitField0_ = 0;
1773      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1774          com.google.protobuf.UnknownFieldSet.newBuilder();
1775      try {
1776        boolean done = false;
1777        while (!done) {
1778          int tag = input.readTag();
1779          switch (tag) {
1780            case 0:
1781              done = true;
1782              break;
1783            default: {
1784              if (!parseUnknownField(input, unknownFields,
1785                                     extensionRegistry, tag)) {
1786                done = true;
1787              }
1788              break;
1789            }
1790            case 10: {
1791              bitField0_ |= 0x00000001;
1792              message_ = input.readBytes();
1793              break;
1794            }
1795          }
1796        }
1797      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1798        throw e.setUnfinishedMessage(this);
1799      } catch (java.io.IOException e) {
1800        throw new com.google.protobuf.InvalidProtocolBufferException(
1801            e.getMessage()).setUnfinishedMessage(this);
1802      } finally {
1803        this.unknownFields = unknownFields.build();
1804        makeExtensionsImmutable();
1805      }
1806    }
1807    public static final com.google.protobuf.Descriptors.Descriptor
1808        getDescriptor() {
1809      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_descriptor;
1810    }
1811
1812    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1813        internalGetFieldAccessorTable() {
1814      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
1815          .ensureFieldAccessorsInitialized(
1816              org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.Builder.class);
1817    }
1818
1819    public static com.google.protobuf.Parser<OptRequestProto> PARSER =
1820        new com.google.protobuf.AbstractParser<OptRequestProto>() {
1821      public OptRequestProto parsePartialFrom(
1822          com.google.protobuf.CodedInputStream input,
1823          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1824          throws com.google.protobuf.InvalidProtocolBufferException {
1825        return new OptRequestProto(input, extensionRegistry);
1826      }
1827    };
1828
1829    @java.lang.Override
1830    public com.google.protobuf.Parser<OptRequestProto> getParserForType() {
1831      return PARSER;
1832    }
1833
1834    private int bitField0_;
1835    // optional string message = 1;
1836    public static final int MESSAGE_FIELD_NUMBER = 1;
1837    private java.lang.Object message_;
1838    /**
1839     * <code>optional string message = 1;</code>
1840     */
1841    public boolean hasMessage() {
1842      return ((bitField0_ & 0x00000001) == 0x00000001);
1843    }
1844    /**
1845     * <code>optional string message = 1;</code>
1846     */
1847    public java.lang.String getMessage() {
1848      java.lang.Object ref = message_;
1849      if (ref instanceof java.lang.String) {
1850        return (java.lang.String) ref;
1851      } else {
1852        com.google.protobuf.ByteString bs = 
1853            (com.google.protobuf.ByteString) ref;
1854        java.lang.String s = bs.toStringUtf8();
1855        if (bs.isValidUtf8()) {
1856          message_ = s;
1857        }
1858        return s;
1859      }
1860    }
1861    /**
1862     * <code>optional string message = 1;</code>
1863     */
1864    public com.google.protobuf.ByteString
1865        getMessageBytes() {
1866      java.lang.Object ref = message_;
1867      if (ref instanceof java.lang.String) {
1868        com.google.protobuf.ByteString b = 
1869            com.google.protobuf.ByteString.copyFromUtf8(
1870                (java.lang.String) ref);
1871        message_ = b;
1872        return b;
1873      } else {
1874        return (com.google.protobuf.ByteString) ref;
1875      }
1876    }
1877
1878    private void initFields() {
1879      message_ = "";
1880    }
1881    private byte memoizedIsInitialized = -1;
1882    public final boolean isInitialized() {
1883      byte isInitialized = memoizedIsInitialized;
1884      if (isInitialized != -1) return isInitialized == 1;
1885
1886      memoizedIsInitialized = 1;
1887      return true;
1888    }
1889
1890    public void writeTo(com.google.protobuf.CodedOutputStream output)
1891                        throws java.io.IOException {
1892      getSerializedSize();
1893      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1894        output.writeBytes(1, getMessageBytes());
1895      }
1896      getUnknownFields().writeTo(output);
1897    }
1898
1899    private int memoizedSerializedSize = -1;
1900    public int getSerializedSize() {
1901      int size = memoizedSerializedSize;
1902      if (size != -1) return size;
1903
1904      size = 0;
1905      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1906        size += com.google.protobuf.CodedOutputStream
1907          .computeBytesSize(1, getMessageBytes());
1908      }
1909      size += getUnknownFields().getSerializedSize();
1910      memoizedSerializedSize = size;
1911      return size;
1912    }
1913
1914    private static final long serialVersionUID = 0L;
1915    @java.lang.Override
1916    protected java.lang.Object writeReplace()
1917        throws java.io.ObjectStreamException {
1918      return super.writeReplace();
1919    }
1920
1921    @java.lang.Override
1922    public boolean equals(final java.lang.Object obj) {
1923      if (obj == this) {
1924       return true;
1925      }
1926      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)) {
1927        return super.equals(obj);
1928      }
1929      org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto) obj;
1930
1931      boolean result = true;
1932      result = result && (hasMessage() == other.hasMessage());
1933      if (hasMessage()) {
1934        result = result && getMessage()
1935            .equals(other.getMessage());
1936      }
1937      result = result &&
1938          getUnknownFields().equals(other.getUnknownFields());
1939      return result;
1940    }
1941
1942    private int memoizedHashCode = 0;
1943    @java.lang.Override
1944    public int hashCode() {
1945      if (memoizedHashCode != 0) {
1946        return memoizedHashCode;
1947      }
1948      int hash = 41;
1949      hash = (19 * hash) + getDescriptorForType().hashCode();
1950      if (hasMessage()) {
1951        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
1952        hash = (53 * hash) + getMessage().hashCode();
1953      }
1954      hash = (29 * hash) + getUnknownFields().hashCode();
1955      memoizedHashCode = hash;
1956      return hash;
1957    }
1958
1959    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
1960        com.google.protobuf.ByteString data)
1961        throws com.google.protobuf.InvalidProtocolBufferException {
1962      return PARSER.parseFrom(data);
1963    }
1964    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
1965        com.google.protobuf.ByteString data,
1966        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1967        throws com.google.protobuf.InvalidProtocolBufferException {
1968      return PARSER.parseFrom(data, extensionRegistry);
1969    }
1970    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(byte[] data)
1971        throws com.google.protobuf.InvalidProtocolBufferException {
1972      return PARSER.parseFrom(data);
1973    }
1974    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
1975        byte[] data,
1976        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1977        throws com.google.protobuf.InvalidProtocolBufferException {
1978      return PARSER.parseFrom(data, extensionRegistry);
1979    }
1980    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(java.io.InputStream input)
1981        throws java.io.IOException {
1982      return PARSER.parseFrom(input);
1983    }
1984    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
1985        java.io.InputStream input,
1986        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1987        throws java.io.IOException {
1988      return PARSER.parseFrom(input, extensionRegistry);
1989    }
1990    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseDelimitedFrom(java.io.InputStream input)
1991        throws java.io.IOException {
1992      return PARSER.parseDelimitedFrom(input);
1993    }
1994    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseDelimitedFrom(
1995        java.io.InputStream input,
1996        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1997        throws java.io.IOException {
1998      return PARSER.parseDelimitedFrom(input, extensionRegistry);
1999    }
2000    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
2001        com.google.protobuf.CodedInputStream input)
2002        throws java.io.IOException {
2003      return PARSER.parseFrom(input);
2004    }
2005    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parseFrom(
2006        com.google.protobuf.CodedInputStream input,
2007        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2008        throws java.io.IOException {
2009      return PARSER.parseFrom(input, extensionRegistry);
2010    }
2011
2012    public static Builder newBuilder() { return Builder.create(); }
2013    public Builder newBuilderForType() { return newBuilder(); }
2014    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto prototype) {
2015      return newBuilder().mergeFrom(prototype);
2016    }
2017    public Builder toBuilder() { return newBuilder(this); }
2018
2019    @java.lang.Override
2020    protected Builder newBuilderForType(
2021        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2022      Builder builder = new Builder(parent);
2023      return builder;
2024    }
2025    /**
2026     * Protobuf type {@code hadoop.common.OptRequestProto}
2027     */
2028    public static final class Builder extends
2029        com.google.protobuf.GeneratedMessage.Builder<Builder>
2030       implements org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProtoOrBuilder {
2031      public static final com.google.protobuf.Descriptors.Descriptor
2032          getDescriptor() {
2033        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_descriptor;
2034      }
2035
2036      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2037          internalGetFieldAccessorTable() {
2038        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_fieldAccessorTable
2039            .ensureFieldAccessorsInitialized(
2040                org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.Builder.class);
2041      }
2042
2043      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.newBuilder()
2044      private Builder() {
2045        maybeForceBuilderInitialization();
2046      }
2047
2048      private Builder(
2049          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2050        super(parent);
2051        maybeForceBuilderInitialization();
2052      }
2053      private void maybeForceBuilderInitialization() {
2054        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2055        }
2056      }
2057      private static Builder create() {
2058        return new Builder();
2059      }
2060
2061      public Builder clear() {
2062        super.clear();
2063        message_ = "";
2064        bitField0_ = (bitField0_ & ~0x00000001);
2065        return this;
2066      }
2067
2068      public Builder clone() {
2069        return create().mergeFrom(buildPartial());
2070      }
2071
2072      public com.google.protobuf.Descriptors.Descriptor
2073          getDescriptorForType() {
2074        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptRequestProto_descriptor;
2075      }
2076
2077      public org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto getDefaultInstanceForType() {
2078        return org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance();
2079      }
2080
2081      public org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto build() {
2082        org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto result = buildPartial();
2083        if (!result.isInitialized()) {
2084          throw newUninitializedMessageException(result);
2085        }
2086        return result;
2087      }
2088
2089      public org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto buildPartial() {
2090        org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto(this);
2091        int from_bitField0_ = bitField0_;
2092        int to_bitField0_ = 0;
2093        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2094          to_bitField0_ |= 0x00000001;
2095        }
2096        result.message_ = message_;
2097        result.bitField0_ = to_bitField0_;
2098        onBuilt();
2099        return result;
2100      }
2101
2102      public Builder mergeFrom(com.google.protobuf.Message other) {
2103        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto) {
2104          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)other);
2105        } else {
2106          super.mergeFrom(other);
2107          return this;
2108        }
2109      }
2110
2111      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto other) {
2112        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance()) return this;
2113        if (other.hasMessage()) {
2114          bitField0_ |= 0x00000001;
2115          message_ = other.message_;
2116          onChanged();
2117        }
2118        this.mergeUnknownFields(other.getUnknownFields());
2119        return this;
2120      }
2121
2122      public final boolean isInitialized() {
2123        return true;
2124      }
2125
2126      public Builder mergeFrom(
2127          com.google.protobuf.CodedInputStream input,
2128          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2129          throws java.io.IOException {
2130        org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto parsedMessage = null;
2131        try {
2132          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2133        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2134          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto) e.getUnfinishedMessage();
2135          throw e;
2136        } finally {
2137          if (parsedMessage != null) {
2138            mergeFrom(parsedMessage);
2139          }
2140        }
2141        return this;
2142      }
2143      private int bitField0_;
2144
2145      // optional string message = 1;
2146      private java.lang.Object message_ = "";
2147      /**
2148       * <code>optional string message = 1;</code>
2149       */
2150      public boolean hasMessage() {
2151        return ((bitField0_ & 0x00000001) == 0x00000001);
2152      }
2153      /**
2154       * <code>optional string message = 1;</code>
2155       */
2156      public java.lang.String getMessage() {
2157        java.lang.Object ref = message_;
2158        if (!(ref instanceof java.lang.String)) {
2159          java.lang.String s = ((com.google.protobuf.ByteString) ref)
2160              .toStringUtf8();
2161          message_ = s;
2162          return s;
2163        } else {
2164          return (java.lang.String) ref;
2165        }
2166      }
2167      /**
2168       * <code>optional string message = 1;</code>
2169       */
2170      public com.google.protobuf.ByteString
2171          getMessageBytes() {
2172        java.lang.Object ref = message_;
2173        if (ref instanceof String) {
2174          com.google.protobuf.ByteString b = 
2175              com.google.protobuf.ByteString.copyFromUtf8(
2176                  (java.lang.String) ref);
2177          message_ = b;
2178          return b;
2179        } else {
2180          return (com.google.protobuf.ByteString) ref;
2181        }
2182      }
2183      /**
2184       * <code>optional string message = 1;</code>
2185       */
2186      public Builder setMessage(
2187          java.lang.String value) {
2188        if (value == null) {
2189    throw new NullPointerException();
2190  }
2191  bitField0_ |= 0x00000001;
2192        message_ = value;
2193        onChanged();
2194        return this;
2195      }
2196      /**
2197       * <code>optional string message = 1;</code>
2198       */
2199      public Builder clearMessage() {
2200        bitField0_ = (bitField0_ & ~0x00000001);
2201        message_ = getDefaultInstance().getMessage();
2202        onChanged();
2203        return this;
2204      }
2205      /**
2206       * <code>optional string message = 1;</code>
2207       */
2208      public Builder setMessageBytes(
2209          com.google.protobuf.ByteString value) {
2210        if (value == null) {
2211    throw new NullPointerException();
2212  }
2213  bitField0_ |= 0x00000001;
2214        message_ = value;
2215        onChanged();
2216        return this;
2217      }
2218
2219      // @@protoc_insertion_point(builder_scope:hadoop.common.OptRequestProto)
2220    }
2221
2222    static {
2223      defaultInstance = new OptRequestProto(true);
2224      defaultInstance.initFields();
2225    }
2226
2227    // @@protoc_insertion_point(class_scope:hadoop.common.OptRequestProto)
2228  }
2229
2230  public interface OptResponseProtoOrBuilder
2231      extends com.google.protobuf.MessageOrBuilder {
2232
2233    // optional string message = 1;
2234    /**
2235     * <code>optional string message = 1;</code>
2236     */
2237    boolean hasMessage();
2238    /**
2239     * <code>optional string message = 1;</code>
2240     */
2241    java.lang.String getMessage();
2242    /**
2243     * <code>optional string message = 1;</code>
2244     */
2245    com.google.protobuf.ByteString
2246        getMessageBytes();
2247  }
2248  /**
2249   * Protobuf type {@code hadoop.common.OptResponseProto}
2250   */
2251  public static final class OptResponseProto extends
2252      com.google.protobuf.GeneratedMessage
2253      implements OptResponseProtoOrBuilder {
2254    // Use OptResponseProto.newBuilder() to construct.
2255    private OptResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
2256      super(builder);
2257      this.unknownFields = builder.getUnknownFields();
2258    }
2259    private OptResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2260
2261    private static final OptResponseProto defaultInstance;
2262    public static OptResponseProto getDefaultInstance() {
2263      return defaultInstance;
2264    }
2265
2266    public OptResponseProto getDefaultInstanceForType() {
2267      return defaultInstance;
2268    }
2269
2270    private final com.google.protobuf.UnknownFieldSet unknownFields;
2271    @java.lang.Override
2272    public final com.google.protobuf.UnknownFieldSet
2273        getUnknownFields() {
2274      return this.unknownFields;
2275    }
2276    private OptResponseProto(
2277        com.google.protobuf.CodedInputStream input,
2278        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2279        throws com.google.protobuf.InvalidProtocolBufferException {
2280      initFields();
2281      int mutable_bitField0_ = 0;
2282      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2283          com.google.protobuf.UnknownFieldSet.newBuilder();
2284      try {
2285        boolean done = false;
2286        while (!done) {
2287          int tag = input.readTag();
2288          switch (tag) {
2289            case 0:
2290              done = true;
2291              break;
2292            default: {
2293              if (!parseUnknownField(input, unknownFields,
2294                                     extensionRegistry, tag)) {
2295                done = true;
2296              }
2297              break;
2298            }
2299            case 10: {
2300              bitField0_ |= 0x00000001;
2301              message_ = input.readBytes();
2302              break;
2303            }
2304          }
2305        }
2306      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2307        throw e.setUnfinishedMessage(this);
2308      } catch (java.io.IOException e) {
2309        throw new com.google.protobuf.InvalidProtocolBufferException(
2310            e.getMessage()).setUnfinishedMessage(this);
2311      } finally {
2312        this.unknownFields = unknownFields.build();
2313        makeExtensionsImmutable();
2314      }
2315    }
2316    public static final com.google.protobuf.Descriptors.Descriptor
2317        getDescriptor() {
2318      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_descriptor;
2319    }
2320
2321    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2322        internalGetFieldAccessorTable() {
2323      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
2324          .ensureFieldAccessorsInitialized(
2325              org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.Builder.class);
2326    }
2327
2328    public static com.google.protobuf.Parser<OptResponseProto> PARSER =
2329        new com.google.protobuf.AbstractParser<OptResponseProto>() {
2330      public OptResponseProto parsePartialFrom(
2331          com.google.protobuf.CodedInputStream input,
2332          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2333          throws com.google.protobuf.InvalidProtocolBufferException {
2334        return new OptResponseProto(input, extensionRegistry);
2335      }
2336    };
2337
2338    @java.lang.Override
2339    public com.google.protobuf.Parser<OptResponseProto> getParserForType() {
2340      return PARSER;
2341    }
2342
2343    private int bitField0_;
2344    // optional string message = 1;
2345    public static final int MESSAGE_FIELD_NUMBER = 1;
2346    private java.lang.Object message_;
2347    /**
2348     * <code>optional string message = 1;</code>
2349     */
2350    public boolean hasMessage() {
2351      return ((bitField0_ & 0x00000001) == 0x00000001);
2352    }
2353    /**
2354     * <code>optional string message = 1;</code>
2355     */
2356    public java.lang.String getMessage() {
2357      java.lang.Object ref = message_;
2358      if (ref instanceof java.lang.String) {
2359        return (java.lang.String) ref;
2360      } else {
2361        com.google.protobuf.ByteString bs = 
2362            (com.google.protobuf.ByteString) ref;
2363        java.lang.String s = bs.toStringUtf8();
2364        if (bs.isValidUtf8()) {
2365          message_ = s;
2366        }
2367        return s;
2368      }
2369    }
2370    /**
2371     * <code>optional string message = 1;</code>
2372     */
2373    public com.google.protobuf.ByteString
2374        getMessageBytes() {
2375      java.lang.Object ref = message_;
2376      if (ref instanceof java.lang.String) {
2377        com.google.protobuf.ByteString b = 
2378            com.google.protobuf.ByteString.copyFromUtf8(
2379                (java.lang.String) ref);
2380        message_ = b;
2381        return b;
2382      } else {
2383        return (com.google.protobuf.ByteString) ref;
2384      }
2385    }
2386
2387    private void initFields() {
2388      message_ = "";
2389    }
2390    private byte memoizedIsInitialized = -1;
2391    public final boolean isInitialized() {
2392      byte isInitialized = memoizedIsInitialized;
2393      if (isInitialized != -1) return isInitialized == 1;
2394
2395      memoizedIsInitialized = 1;
2396      return true;
2397    }
2398
2399    public void writeTo(com.google.protobuf.CodedOutputStream output)
2400                        throws java.io.IOException {
2401      getSerializedSize();
2402      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2403        output.writeBytes(1, getMessageBytes());
2404      }
2405      getUnknownFields().writeTo(output);
2406    }
2407
2408    private int memoizedSerializedSize = -1;
2409    public int getSerializedSize() {
2410      int size = memoizedSerializedSize;
2411      if (size != -1) return size;
2412
2413      size = 0;
2414      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2415        size += com.google.protobuf.CodedOutputStream
2416          .computeBytesSize(1, getMessageBytes());
2417      }
2418      size += getUnknownFields().getSerializedSize();
2419      memoizedSerializedSize = size;
2420      return size;
2421    }
2422
2423    private static final long serialVersionUID = 0L;
2424    @java.lang.Override
2425    protected java.lang.Object writeReplace()
2426        throws java.io.ObjectStreamException {
2427      return super.writeReplace();
2428    }
2429
2430    @java.lang.Override
2431    public boolean equals(final java.lang.Object obj) {
2432      if (obj == this) {
2433       return true;
2434      }
2435      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto)) {
2436        return super.equals(obj);
2437      }
2438      org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) obj;
2439
2440      boolean result = true;
2441      result = result && (hasMessage() == other.hasMessage());
2442      if (hasMessage()) {
2443        result = result && getMessage()
2444            .equals(other.getMessage());
2445      }
2446      result = result &&
2447          getUnknownFields().equals(other.getUnknownFields());
2448      return result;
2449    }
2450
2451    private int memoizedHashCode = 0;
2452    @java.lang.Override
2453    public int hashCode() {
2454      if (memoizedHashCode != 0) {
2455        return memoizedHashCode;
2456      }
2457      int hash = 41;
2458      hash = (19 * hash) + getDescriptorForType().hashCode();
2459      if (hasMessage()) {
2460        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
2461        hash = (53 * hash) + getMessage().hashCode();
2462      }
2463      hash = (29 * hash) + getUnknownFields().hashCode();
2464      memoizedHashCode = hash;
2465      return hash;
2466    }
2467
2468    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
2469        com.google.protobuf.ByteString data)
2470        throws com.google.protobuf.InvalidProtocolBufferException {
2471      return PARSER.parseFrom(data);
2472    }
2473    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
2474        com.google.protobuf.ByteString data,
2475        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2476        throws com.google.protobuf.InvalidProtocolBufferException {
2477      return PARSER.parseFrom(data, extensionRegistry);
2478    }
2479    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(byte[] data)
2480        throws com.google.protobuf.InvalidProtocolBufferException {
2481      return PARSER.parseFrom(data);
2482    }
2483    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
2484        byte[] data,
2485        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2486        throws com.google.protobuf.InvalidProtocolBufferException {
2487      return PARSER.parseFrom(data, extensionRegistry);
2488    }
2489    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(java.io.InputStream input)
2490        throws java.io.IOException {
2491      return PARSER.parseFrom(input);
2492    }
2493    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
2494        java.io.InputStream input,
2495        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2496        throws java.io.IOException {
2497      return PARSER.parseFrom(input, extensionRegistry);
2498    }
2499    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseDelimitedFrom(java.io.InputStream input)
2500        throws java.io.IOException {
2501      return PARSER.parseDelimitedFrom(input);
2502    }
2503    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseDelimitedFrom(
2504        java.io.InputStream input,
2505        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2506        throws java.io.IOException {
2507      return PARSER.parseDelimitedFrom(input, extensionRegistry);
2508    }
2509    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
2510        com.google.protobuf.CodedInputStream input)
2511        throws java.io.IOException {
2512      return PARSER.parseFrom(input);
2513    }
2514    public static org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parseFrom(
2515        com.google.protobuf.CodedInputStream input,
2516        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2517        throws java.io.IOException {
2518      return PARSER.parseFrom(input, extensionRegistry);
2519    }
2520
2521    public static Builder newBuilder() { return Builder.create(); }
2522    public Builder newBuilderForType() { return newBuilder(); }
2523    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto prototype) {
2524      return newBuilder().mergeFrom(prototype);
2525    }
2526    public Builder toBuilder() { return newBuilder(this); }
2527
2528    @java.lang.Override
2529    protected Builder newBuilderForType(
2530        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2531      Builder builder = new Builder(parent);
2532      return builder;
2533    }
2534    /**
2535     * Protobuf type {@code hadoop.common.OptResponseProto}
2536     */
2537    public static final class Builder extends
2538        com.google.protobuf.GeneratedMessage.Builder<Builder>
2539       implements org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProtoOrBuilder {
2540      public static final com.google.protobuf.Descriptors.Descriptor
2541          getDescriptor() {
2542        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_descriptor;
2543      }
2544
2545      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2546          internalGetFieldAccessorTable() {
2547        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_fieldAccessorTable
2548            .ensureFieldAccessorsInitialized(
2549                org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.Builder.class);
2550      }
2551
2552      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.newBuilder()
2553      private Builder() {
2554        maybeForceBuilderInitialization();
2555      }
2556
2557      private Builder(
2558          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2559        super(parent);
2560        maybeForceBuilderInitialization();
2561      }
2562      private void maybeForceBuilderInitialization() {
2563        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2564        }
2565      }
2566      private static Builder create() {
2567        return new Builder();
2568      }
2569
2570      public Builder clear() {
2571        super.clear();
2572        message_ = "";
2573        bitField0_ = (bitField0_ & ~0x00000001);
2574        return this;
2575      }
2576
2577      public Builder clone() {
2578        return create().mergeFrom(buildPartial());
2579      }
2580
2581      public com.google.protobuf.Descriptors.Descriptor
2582          getDescriptorForType() {
2583        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_OptResponseProto_descriptor;
2584      }
2585
2586      public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto getDefaultInstanceForType() {
2587        return org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance();
2588      }
2589
2590      public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto build() {
2591        org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto result = buildPartial();
2592        if (!result.isInitialized()) {
2593          throw newUninitializedMessageException(result);
2594        }
2595        return result;
2596      }
2597
2598      public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto buildPartial() {
2599        org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto(this);
2600        int from_bitField0_ = bitField0_;
2601        int to_bitField0_ = 0;
2602        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2603          to_bitField0_ |= 0x00000001;
2604        }
2605        result.message_ = message_;
2606        result.bitField0_ = to_bitField0_;
2607        onBuilt();
2608        return result;
2609      }
2610
2611      public Builder mergeFrom(com.google.protobuf.Message other) {
2612        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) {
2613          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto)other);
2614        } else {
2615          super.mergeFrom(other);
2616          return this;
2617        }
2618      }
2619
2620      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto other) {
2621        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance()) return this;
2622        if (other.hasMessage()) {
2623          bitField0_ |= 0x00000001;
2624          message_ = other.message_;
2625          onChanged();
2626        }
2627        this.mergeUnknownFields(other.getUnknownFields());
2628        return this;
2629      }
2630
2631      public final boolean isInitialized() {
2632        return true;
2633      }
2634
2635      public Builder mergeFrom(
2636          com.google.protobuf.CodedInputStream input,
2637          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2638          throws java.io.IOException {
2639        org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto parsedMessage = null;
2640        try {
2641          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2642        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2643          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) e.getUnfinishedMessage();
2644          throw e;
2645        } finally {
2646          if (parsedMessage != null) {
2647            mergeFrom(parsedMessage);
2648          }
2649        }
2650        return this;
2651      }
2652      private int bitField0_;
2653
2654      // optional string message = 1;
2655      private java.lang.Object message_ = "";
2656      /**
2657       * <code>optional string message = 1;</code>
2658       */
2659      public boolean hasMessage() {
2660        return ((bitField0_ & 0x00000001) == 0x00000001);
2661      }
2662      /**
2663       * <code>optional string message = 1;</code>
2664       */
2665      public java.lang.String getMessage() {
2666        java.lang.Object ref = message_;
2667        if (!(ref instanceof java.lang.String)) {
2668          java.lang.String s = ((com.google.protobuf.ByteString) ref)
2669              .toStringUtf8();
2670          message_ = s;
2671          return s;
2672        } else {
2673          return (java.lang.String) ref;
2674        }
2675      }
2676      /**
2677       * <code>optional string message = 1;</code>
2678       */
2679      public com.google.protobuf.ByteString
2680          getMessageBytes() {
2681        java.lang.Object ref = message_;
2682        if (ref instanceof String) {
2683          com.google.protobuf.ByteString b = 
2684              com.google.protobuf.ByteString.copyFromUtf8(
2685                  (java.lang.String) ref);
2686          message_ = b;
2687          return b;
2688        } else {
2689          return (com.google.protobuf.ByteString) ref;
2690        }
2691      }
2692      /**
2693       * <code>optional string message = 1;</code>
2694       */
2695      public Builder setMessage(
2696          java.lang.String value) {
2697        if (value == null) {
2698    throw new NullPointerException();
2699  }
2700  bitField0_ |= 0x00000001;
2701        message_ = value;
2702        onChanged();
2703        return this;
2704      }
2705      /**
2706       * <code>optional string message = 1;</code>
2707       */
2708      public Builder clearMessage() {
2709        bitField0_ = (bitField0_ & ~0x00000001);
2710        message_ = getDefaultInstance().getMessage();
2711        onChanged();
2712        return this;
2713      }
2714      /**
2715       * <code>optional string message = 1;</code>
2716       */
2717      public Builder setMessageBytes(
2718          com.google.protobuf.ByteString value) {
2719        if (value == null) {
2720    throw new NullPointerException();
2721  }
2722  bitField0_ |= 0x00000001;
2723        message_ = value;
2724        onChanged();
2725        return this;
2726      }
2727
2728      // @@protoc_insertion_point(builder_scope:hadoop.common.OptResponseProto)
2729    }
2730
2731    static {
2732      defaultInstance = new OptResponseProto(true);
2733      defaultInstance.initFields();
2734    }
2735
2736    // @@protoc_insertion_point(class_scope:hadoop.common.OptResponseProto)
2737  }
2738
2739  public interface SleepRequestProtoOrBuilder
2740      extends com.google.protobuf.MessageOrBuilder {
2741
2742    // required int32 milliSeconds = 1;
2743    /**
2744     * <code>required int32 milliSeconds = 1;</code>
2745     */
2746    boolean hasMilliSeconds();
2747    /**
2748     * <code>required int32 milliSeconds = 1;</code>
2749     */
2750    int getMilliSeconds();
2751  }
2752  /**
2753   * Protobuf type {@code hadoop.common.SleepRequestProto}
2754   */
2755  public static final class SleepRequestProto extends
2756      com.google.protobuf.GeneratedMessage
2757      implements SleepRequestProtoOrBuilder {
2758    // Use SleepRequestProto.newBuilder() to construct.
2759    private SleepRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
2760      super(builder);
2761      this.unknownFields = builder.getUnknownFields();
2762    }
2763    private SleepRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2764
2765    private static final SleepRequestProto defaultInstance;
2766    public static SleepRequestProto getDefaultInstance() {
2767      return defaultInstance;
2768    }
2769
2770    public SleepRequestProto getDefaultInstanceForType() {
2771      return defaultInstance;
2772    }
2773
2774    private final com.google.protobuf.UnknownFieldSet unknownFields;
2775    @java.lang.Override
2776    public final com.google.protobuf.UnknownFieldSet
2777        getUnknownFields() {
2778      return this.unknownFields;
2779    }
2780    private SleepRequestProto(
2781        com.google.protobuf.CodedInputStream input,
2782        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2783        throws com.google.protobuf.InvalidProtocolBufferException {
2784      initFields();
2785      int mutable_bitField0_ = 0;
2786      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2787          com.google.protobuf.UnknownFieldSet.newBuilder();
2788      try {
2789        boolean done = false;
2790        while (!done) {
2791          int tag = input.readTag();
2792          switch (tag) {
2793            case 0:
2794              done = true;
2795              break;
2796            default: {
2797              if (!parseUnknownField(input, unknownFields,
2798                                     extensionRegistry, tag)) {
2799                done = true;
2800              }
2801              break;
2802            }
2803            case 8: {
2804              bitField0_ |= 0x00000001;
2805              milliSeconds_ = input.readInt32();
2806              break;
2807            }
2808          }
2809        }
2810      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2811        throw e.setUnfinishedMessage(this);
2812      } catch (java.io.IOException e) {
2813        throw new com.google.protobuf.InvalidProtocolBufferException(
2814            e.getMessage()).setUnfinishedMessage(this);
2815      } finally {
2816        this.unknownFields = unknownFields.build();
2817        makeExtensionsImmutable();
2818      }
2819    }
2820    public static final com.google.protobuf.Descriptors.Descriptor
2821        getDescriptor() {
2822      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
2823    }
2824
2825    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2826        internalGetFieldAccessorTable() {
2827      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
2828          .ensureFieldAccessorsInitialized(
2829              org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.Builder.class);
2830    }
2831
2832    public static com.google.protobuf.Parser<SleepRequestProto> PARSER =
2833        new com.google.protobuf.AbstractParser<SleepRequestProto>() {
2834      public SleepRequestProto parsePartialFrom(
2835          com.google.protobuf.CodedInputStream input,
2836          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2837          throws com.google.protobuf.InvalidProtocolBufferException {
2838        return new SleepRequestProto(input, extensionRegistry);
2839      }
2840    };
2841
2842    @java.lang.Override
2843    public com.google.protobuf.Parser<SleepRequestProto> getParserForType() {
2844      return PARSER;
2845    }
2846
2847    private int bitField0_;
2848    // required int32 milliSeconds = 1;
2849    public static final int MILLISECONDS_FIELD_NUMBER = 1;
2850    private int milliSeconds_;
2851    /**
2852     * <code>required int32 milliSeconds = 1;</code>
2853     */
2854    public boolean hasMilliSeconds() {
2855      return ((bitField0_ & 0x00000001) == 0x00000001);
2856    }
2857    /**
2858     * <code>required int32 milliSeconds = 1;</code>
2859     */
2860    public int getMilliSeconds() {
2861      return milliSeconds_;
2862    }
2863
2864    private void initFields() {
2865      milliSeconds_ = 0;
2866    }
2867    private byte memoizedIsInitialized = -1;
2868    public final boolean isInitialized() {
2869      byte isInitialized = memoizedIsInitialized;
2870      if (isInitialized != -1) return isInitialized == 1;
2871
2872      if (!hasMilliSeconds()) {
2873        memoizedIsInitialized = 0;
2874        return false;
2875      }
2876      memoizedIsInitialized = 1;
2877      return true;
2878    }
2879
2880    public void writeTo(com.google.protobuf.CodedOutputStream output)
2881                        throws java.io.IOException {
2882      getSerializedSize();
2883      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2884        output.writeInt32(1, milliSeconds_);
2885      }
2886      getUnknownFields().writeTo(output);
2887    }
2888
2889    private int memoizedSerializedSize = -1;
2890    public int getSerializedSize() {
2891      int size = memoizedSerializedSize;
2892      if (size != -1) return size;
2893
2894      size = 0;
2895      if (((bitField0_ & 0x00000001) == 0x00000001)) {
2896        size += com.google.protobuf.CodedOutputStream
2897          .computeInt32Size(1, milliSeconds_);
2898      }
2899      size += getUnknownFields().getSerializedSize();
2900      memoizedSerializedSize = size;
2901      return size;
2902    }
2903
2904    private static final long serialVersionUID = 0L;
2905    @java.lang.Override
2906    protected java.lang.Object writeReplace()
2907        throws java.io.ObjectStreamException {
2908      return super.writeReplace();
2909    }
2910
2911    @java.lang.Override
2912    public boolean equals(final java.lang.Object obj) {
2913      if (obj == this) {
2914       return true;
2915      }
2916      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)) {
2917        return super.equals(obj);
2918      }
2919      org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) obj;
2920
2921      boolean result = true;
2922      result = result && (hasMilliSeconds() == other.hasMilliSeconds());
2923      if (hasMilliSeconds()) {
2924        result = result && (getMilliSeconds()
2925            == other.getMilliSeconds());
2926      }
2927      result = result &&
2928          getUnknownFields().equals(other.getUnknownFields());
2929      return result;
2930    }
2931
2932    private int memoizedHashCode = 0;
2933    @java.lang.Override
2934    public int hashCode() {
2935      if (memoizedHashCode != 0) {
2936        return memoizedHashCode;
2937      }
2938      int hash = 41;
2939      hash = (19 * hash) + getDescriptorForType().hashCode();
2940      if (hasMilliSeconds()) {
2941        hash = (37 * hash) + MILLISECONDS_FIELD_NUMBER;
2942        hash = (53 * hash) + getMilliSeconds();
2943      }
2944      hash = (29 * hash) + getUnknownFields().hashCode();
2945      memoizedHashCode = hash;
2946      return hash;
2947    }
2948
2949    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
2950        com.google.protobuf.ByteString data)
2951        throws com.google.protobuf.InvalidProtocolBufferException {
2952      return PARSER.parseFrom(data);
2953    }
2954    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
2955        com.google.protobuf.ByteString data,
2956        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2957        throws com.google.protobuf.InvalidProtocolBufferException {
2958      return PARSER.parseFrom(data, extensionRegistry);
2959    }
2960    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(byte[] data)
2961        throws com.google.protobuf.InvalidProtocolBufferException {
2962      return PARSER.parseFrom(data);
2963    }
2964    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
2965        byte[] data,
2966        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2967        throws com.google.protobuf.InvalidProtocolBufferException {
2968      return PARSER.parseFrom(data, extensionRegistry);
2969    }
2970    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(java.io.InputStream input)
2971        throws java.io.IOException {
2972      return PARSER.parseFrom(input);
2973    }
2974    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
2975        java.io.InputStream input,
2976        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2977        throws java.io.IOException {
2978      return PARSER.parseFrom(input, extensionRegistry);
2979    }
2980    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseDelimitedFrom(java.io.InputStream input)
2981        throws java.io.IOException {
2982      return PARSER.parseDelimitedFrom(input);
2983    }
2984    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseDelimitedFrom(
2985        java.io.InputStream input,
2986        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2987        throws java.io.IOException {
2988      return PARSER.parseDelimitedFrom(input, extensionRegistry);
2989    }
2990    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
2991        com.google.protobuf.CodedInputStream input)
2992        throws java.io.IOException {
2993      return PARSER.parseFrom(input);
2994    }
2995    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parseFrom(
2996        com.google.protobuf.CodedInputStream input,
2997        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2998        throws java.io.IOException {
2999      return PARSER.parseFrom(input, extensionRegistry);
3000    }
3001
3002    public static Builder newBuilder() { return Builder.create(); }
3003    public Builder newBuilderForType() { return newBuilder(); }
3004    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto prototype) {
3005      return newBuilder().mergeFrom(prototype);
3006    }
3007    public Builder toBuilder() { return newBuilder(this); }
3008
3009    @java.lang.Override
3010    protected Builder newBuilderForType(
3011        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3012      Builder builder = new Builder(parent);
3013      return builder;
3014    }
3015    /**
3016     * Protobuf type {@code hadoop.common.SleepRequestProto}
3017     */
3018    public static final class Builder extends
3019        com.google.protobuf.GeneratedMessage.Builder<Builder>
3020       implements org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProtoOrBuilder {
3021      public static final com.google.protobuf.Descriptors.Descriptor
3022          getDescriptor() {
3023        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
3024      }
3025
3026      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3027          internalGetFieldAccessorTable() {
3028        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable
3029            .ensureFieldAccessorsInitialized(
3030                org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.Builder.class);
3031      }
3032
3033      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.newBuilder()
3034      private Builder() {
3035        maybeForceBuilderInitialization();
3036      }
3037
3038      private Builder(
3039          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3040        super(parent);
3041        maybeForceBuilderInitialization();
3042      }
3043      private void maybeForceBuilderInitialization() {
3044        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3045        }
3046      }
3047      private static Builder create() {
3048        return new Builder();
3049      }
3050
3051      public Builder clear() {
3052        super.clear();
3053        milliSeconds_ = 0;
3054        bitField0_ = (bitField0_ & ~0x00000001);
3055        return this;
3056      }
3057
3058      public Builder clone() {
3059        return create().mergeFrom(buildPartial());
3060      }
3061
3062      public com.google.protobuf.Descriptors.Descriptor
3063          getDescriptorForType() {
3064        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepRequestProto_descriptor;
3065      }
3066
3067      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto getDefaultInstanceForType() {
3068        return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
3069      }
3070
3071      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto build() {
3072        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto result = buildPartial();
3073        if (!result.isInitialized()) {
3074          throw newUninitializedMessageException(result);
3075        }
3076        return result;
3077      }
3078
3079      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto buildPartial() {
3080        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto(this);
3081        int from_bitField0_ = bitField0_;
3082        int to_bitField0_ = 0;
3083        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3084          to_bitField0_ |= 0x00000001;
3085        }
3086        result.milliSeconds_ = milliSeconds_;
3087        result.bitField0_ = to_bitField0_;
3088        onBuilt();
3089        return result;
3090      }
3091
3092      public Builder mergeFrom(com.google.protobuf.Message other) {
3093        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) {
3094          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)other);
3095        } else {
3096          super.mergeFrom(other);
3097          return this;
3098        }
3099      }
3100
3101      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto other) {
3102        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance()) return this;
3103        if (other.hasMilliSeconds()) {
3104          setMilliSeconds(other.getMilliSeconds());
3105        }
3106        this.mergeUnknownFields(other.getUnknownFields());
3107        return this;
3108      }
3109
3110      public final boolean isInitialized() {
3111        if (!hasMilliSeconds()) {
3112          
3113          return false;
3114        }
3115        return true;
3116      }
3117
3118      public Builder mergeFrom(
3119          com.google.protobuf.CodedInputStream input,
3120          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3121          throws java.io.IOException {
3122        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto parsedMessage = null;
3123        try {
3124          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3125        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3126          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto) e.getUnfinishedMessage();
3127          throw e;
3128        } finally {
3129          if (parsedMessage != null) {
3130            mergeFrom(parsedMessage);
3131          }
3132        }
3133        return this;
3134      }
3135      private int bitField0_;
3136
3137      // required int32 milliSeconds = 1;
3138      private int milliSeconds_ ;
3139      /**
3140       * <code>required int32 milliSeconds = 1;</code>
3141       */
3142      public boolean hasMilliSeconds() {
3143        return ((bitField0_ & 0x00000001) == 0x00000001);
3144      }
3145      /**
3146       * <code>required int32 milliSeconds = 1;</code>
3147       */
3148      public int getMilliSeconds() {
3149        return milliSeconds_;
3150      }
3151      /**
3152       * <code>required int32 milliSeconds = 1;</code>
3153       */
3154      public Builder setMilliSeconds(int value) {
3155        bitField0_ |= 0x00000001;
3156        milliSeconds_ = value;
3157        onChanged();
3158        return this;
3159      }
3160      /**
3161       * <code>required int32 milliSeconds = 1;</code>
3162       */
3163      public Builder clearMilliSeconds() {
3164        bitField0_ = (bitField0_ & ~0x00000001);
3165        milliSeconds_ = 0;
3166        onChanged();
3167        return this;
3168      }
3169
3170      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepRequestProto)
3171    }
3172
3173    static {
3174      defaultInstance = new SleepRequestProto(true);
3175      defaultInstance.initFields();
3176    }
3177
3178    // @@protoc_insertion_point(class_scope:hadoop.common.SleepRequestProto)
3179  }
3180
3181  public interface SleepResponseProtoOrBuilder
3182      extends com.google.protobuf.MessageOrBuilder {
3183  }
3184  /**
3185   * Protobuf type {@code hadoop.common.SleepResponseProto}
3186   */
3187  public static final class SleepResponseProto extends
3188      com.google.protobuf.GeneratedMessage
3189      implements SleepResponseProtoOrBuilder {
3190    // Use SleepResponseProto.newBuilder() to construct.
3191    private SleepResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3192      super(builder);
3193      this.unknownFields = builder.getUnknownFields();
3194    }
3195    private SleepResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3196
3197    private static final SleepResponseProto defaultInstance;
3198    public static SleepResponseProto getDefaultInstance() {
3199      return defaultInstance;
3200    }
3201
3202    public SleepResponseProto getDefaultInstanceForType() {
3203      return defaultInstance;
3204    }
3205
3206    private final com.google.protobuf.UnknownFieldSet unknownFields;
3207    @java.lang.Override
3208    public final com.google.protobuf.UnknownFieldSet
3209        getUnknownFields() {
3210      return this.unknownFields;
3211    }
3212    private SleepResponseProto(
3213        com.google.protobuf.CodedInputStream input,
3214        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3215        throws com.google.protobuf.InvalidProtocolBufferException {
3216      initFields();
3217      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3218          com.google.protobuf.UnknownFieldSet.newBuilder();
3219      try {
3220        boolean done = false;
3221        while (!done) {
3222          int tag = input.readTag();
3223          switch (tag) {
3224            case 0:
3225              done = true;
3226              break;
3227            default: {
3228              if (!parseUnknownField(input, unknownFields,
3229                                     extensionRegistry, tag)) {
3230                done = true;
3231              }
3232              break;
3233            }
3234          }
3235        }
3236      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3237        throw e.setUnfinishedMessage(this);
3238      } catch (java.io.IOException e) {
3239        throw new com.google.protobuf.InvalidProtocolBufferException(
3240            e.getMessage()).setUnfinishedMessage(this);
3241      } finally {
3242        this.unknownFields = unknownFields.build();
3243        makeExtensionsImmutable();
3244      }
3245    }
3246    public static final com.google.protobuf.Descriptors.Descriptor
3247        getDescriptor() {
3248      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
3249    }
3250
3251    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3252        internalGetFieldAccessorTable() {
3253      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
3254          .ensureFieldAccessorsInitialized(
3255              org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.Builder.class);
3256    }
3257
3258    public static com.google.protobuf.Parser<SleepResponseProto> PARSER =
3259        new com.google.protobuf.AbstractParser<SleepResponseProto>() {
3260      public SleepResponseProto parsePartialFrom(
3261          com.google.protobuf.CodedInputStream input,
3262          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3263          throws com.google.protobuf.InvalidProtocolBufferException {
3264        return new SleepResponseProto(input, extensionRegistry);
3265      }
3266    };
3267
3268    @java.lang.Override
3269    public com.google.protobuf.Parser<SleepResponseProto> getParserForType() {
3270      return PARSER;
3271    }
3272
3273    private void initFields() {
3274    }
3275    private byte memoizedIsInitialized = -1;
3276    public final boolean isInitialized() {
3277      byte isInitialized = memoizedIsInitialized;
3278      if (isInitialized != -1) return isInitialized == 1;
3279
3280      memoizedIsInitialized = 1;
3281      return true;
3282    }
3283
3284    public void writeTo(com.google.protobuf.CodedOutputStream output)
3285                        throws java.io.IOException {
3286      getSerializedSize();
3287      getUnknownFields().writeTo(output);
3288    }
3289
3290    private int memoizedSerializedSize = -1;
3291    public int getSerializedSize() {
3292      int size = memoizedSerializedSize;
3293      if (size != -1) return size;
3294
3295      size = 0;
3296      size += getUnknownFields().getSerializedSize();
3297      memoizedSerializedSize = size;
3298      return size;
3299    }
3300
3301    private static final long serialVersionUID = 0L;
3302    @java.lang.Override
3303    protected java.lang.Object writeReplace()
3304        throws java.io.ObjectStreamException {
3305      return super.writeReplace();
3306    }
3307
3308    @java.lang.Override
3309    public boolean equals(final java.lang.Object obj) {
3310      if (obj == this) {
3311       return true;
3312      }
3313      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto)) {
3314        return super.equals(obj);
3315      }
3316      org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) obj;
3317
3318      boolean result = true;
3319      result = result &&
3320          getUnknownFields().equals(other.getUnknownFields());
3321      return result;
3322    }
3323
3324    private int memoizedHashCode = 0;
3325    @java.lang.Override
3326    public int hashCode() {
3327      if (memoizedHashCode != 0) {
3328        return memoizedHashCode;
3329      }
3330      int hash = 41;
3331      hash = (19 * hash) + getDescriptorForType().hashCode();
3332      hash = (29 * hash) + getUnknownFields().hashCode();
3333      memoizedHashCode = hash;
3334      return hash;
3335    }
3336
3337    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
3338        com.google.protobuf.ByteString data)
3339        throws com.google.protobuf.InvalidProtocolBufferException {
3340      return PARSER.parseFrom(data);
3341    }
3342    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
3343        com.google.protobuf.ByteString data,
3344        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3345        throws com.google.protobuf.InvalidProtocolBufferException {
3346      return PARSER.parseFrom(data, extensionRegistry);
3347    }
3348    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(byte[] data)
3349        throws com.google.protobuf.InvalidProtocolBufferException {
3350      return PARSER.parseFrom(data);
3351    }
3352    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
3353        byte[] data,
3354        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3355        throws com.google.protobuf.InvalidProtocolBufferException {
3356      return PARSER.parseFrom(data, extensionRegistry);
3357    }
3358    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(java.io.InputStream input)
3359        throws java.io.IOException {
3360      return PARSER.parseFrom(input);
3361    }
3362    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
3363        java.io.InputStream input,
3364        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3365        throws java.io.IOException {
3366      return PARSER.parseFrom(input, extensionRegistry);
3367    }
3368    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseDelimitedFrom(java.io.InputStream input)
3369        throws java.io.IOException {
3370      return PARSER.parseDelimitedFrom(input);
3371    }
3372    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseDelimitedFrom(
3373        java.io.InputStream input,
3374        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3375        throws java.io.IOException {
3376      return PARSER.parseDelimitedFrom(input, extensionRegistry);
3377    }
3378    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
3379        com.google.protobuf.CodedInputStream input)
3380        throws java.io.IOException {
3381      return PARSER.parseFrom(input);
3382    }
3383    public static org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parseFrom(
3384        com.google.protobuf.CodedInputStream input,
3385        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3386        throws java.io.IOException {
3387      return PARSER.parseFrom(input, extensionRegistry);
3388    }
3389
3390    public static Builder newBuilder() { return Builder.create(); }
3391    public Builder newBuilderForType() { return newBuilder(); }
3392    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto prototype) {
3393      return newBuilder().mergeFrom(prototype);
3394    }
3395    public Builder toBuilder() { return newBuilder(this); }
3396
3397    @java.lang.Override
3398    protected Builder newBuilderForType(
3399        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3400      Builder builder = new Builder(parent);
3401      return builder;
3402    }
3403    /**
3404     * Protobuf type {@code hadoop.common.SleepResponseProto}
3405     */
3406    public static final class Builder extends
3407        com.google.protobuf.GeneratedMessage.Builder<Builder>
3408       implements org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProtoOrBuilder {
3409      public static final com.google.protobuf.Descriptors.Descriptor
3410          getDescriptor() {
3411        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
3412      }
3413
3414      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3415          internalGetFieldAccessorTable() {
3416        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable
3417            .ensureFieldAccessorsInitialized(
3418                org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.Builder.class);
3419      }
3420
3421      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.newBuilder()
3422      private Builder() {
3423        maybeForceBuilderInitialization();
3424      }
3425
3426      private Builder(
3427          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3428        super(parent);
3429        maybeForceBuilderInitialization();
3430      }
3431      private void maybeForceBuilderInitialization() {
3432        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3433        }
3434      }
3435      private static Builder create() {
3436        return new Builder();
3437      }
3438
3439      public Builder clear() {
3440        super.clear();
3441        return this;
3442      }
3443
3444      public Builder clone() {
3445        return create().mergeFrom(buildPartial());
3446      }
3447
3448      public com.google.protobuf.Descriptors.Descriptor
3449          getDescriptorForType() {
3450        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SleepResponseProto_descriptor;
3451      }
3452
3453      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto getDefaultInstanceForType() {
3454        return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
3455      }
3456
3457      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto build() {
3458        org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto result = buildPartial();
3459        if (!result.isInitialized()) {
3460          throw newUninitializedMessageException(result);
3461        }
3462        return result;
3463      }
3464
3465      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto buildPartial() {
3466        org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto(this);
3467        onBuilt();
3468        return result;
3469      }
3470
3471      public Builder mergeFrom(com.google.protobuf.Message other) {
3472        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) {
3473          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto)other);
3474        } else {
3475          super.mergeFrom(other);
3476          return this;
3477        }
3478      }
3479
3480      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto other) {
3481        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance()) return this;
3482        this.mergeUnknownFields(other.getUnknownFields());
3483        return this;
3484      }
3485
3486      public final boolean isInitialized() {
3487        return true;
3488      }
3489
3490      public Builder mergeFrom(
3491          com.google.protobuf.CodedInputStream input,
3492          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3493          throws java.io.IOException {
3494        org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto parsedMessage = null;
3495        try {
3496          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3497        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3498          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) e.getUnfinishedMessage();
3499          throw e;
3500        } finally {
3501          if (parsedMessage != null) {
3502            mergeFrom(parsedMessage);
3503          }
3504        }
3505        return this;
3506      }
3507
3508      // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto)
3509    }
3510
3511    static {
3512      defaultInstance = new SleepResponseProto(true);
3513      defaultInstance.initFields();
3514    }
3515
3516    // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto)
3517  }
3518
3519  public interface SlowPingRequestProtoOrBuilder
3520      extends com.google.protobuf.MessageOrBuilder {
3521
3522    // required bool shouldSlow = 1;
3523    /**
3524     * <code>required bool shouldSlow = 1;</code>
3525     */
3526    boolean hasShouldSlow();
3527    /**
3528     * <code>required bool shouldSlow = 1;</code>
3529     */
3530    boolean getShouldSlow();
3531  }
3532  /**
3533   * Protobuf type {@code hadoop.common.SlowPingRequestProto}
3534   */
3535  public static final class SlowPingRequestProto extends
3536      com.google.protobuf.GeneratedMessage
3537      implements SlowPingRequestProtoOrBuilder {
3538    // Use SlowPingRequestProto.newBuilder() to construct.
3539    private SlowPingRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3540      super(builder);
3541      this.unknownFields = builder.getUnknownFields();
3542    }
3543    private SlowPingRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3544
3545    private static final SlowPingRequestProto defaultInstance;
3546    public static SlowPingRequestProto getDefaultInstance() {
3547      return defaultInstance;
3548    }
3549
3550    public SlowPingRequestProto getDefaultInstanceForType() {
3551      return defaultInstance;
3552    }
3553
3554    private final com.google.protobuf.UnknownFieldSet unknownFields;
3555    @java.lang.Override
3556    public final com.google.protobuf.UnknownFieldSet
3557        getUnknownFields() {
3558      return this.unknownFields;
3559    }
3560    private SlowPingRequestProto(
3561        com.google.protobuf.CodedInputStream input,
3562        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3563        throws com.google.protobuf.InvalidProtocolBufferException {
3564      initFields();
3565      int mutable_bitField0_ = 0;
3566      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3567          com.google.protobuf.UnknownFieldSet.newBuilder();
3568      try {
3569        boolean done = false;
3570        while (!done) {
3571          int tag = input.readTag();
3572          switch (tag) {
3573            case 0:
3574              done = true;
3575              break;
3576            default: {
3577              if (!parseUnknownField(input, unknownFields,
3578                                     extensionRegistry, tag)) {
3579                done = true;
3580              }
3581              break;
3582            }
3583            case 8: {
3584              bitField0_ |= 0x00000001;
3585              shouldSlow_ = input.readBool();
3586              break;
3587            }
3588          }
3589        }
3590      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3591        throw e.setUnfinishedMessage(this);
3592      } catch (java.io.IOException e) {
3593        throw new com.google.protobuf.InvalidProtocolBufferException(
3594            e.getMessage()).setUnfinishedMessage(this);
3595      } finally {
3596        this.unknownFields = unknownFields.build();
3597        makeExtensionsImmutable();
3598      }
3599    }
3600    public static final com.google.protobuf.Descriptors.Descriptor
3601        getDescriptor() {
3602      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
3603    }
3604
3605    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3606        internalGetFieldAccessorTable() {
3607      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
3608          .ensureFieldAccessorsInitialized(
3609              org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.Builder.class);
3610    }
3611
3612    public static com.google.protobuf.Parser<SlowPingRequestProto> PARSER =
3613        new com.google.protobuf.AbstractParser<SlowPingRequestProto>() {
3614      public SlowPingRequestProto parsePartialFrom(
3615          com.google.protobuf.CodedInputStream input,
3616          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3617          throws com.google.protobuf.InvalidProtocolBufferException {
3618        return new SlowPingRequestProto(input, extensionRegistry);
3619      }
3620    };
3621
3622    @java.lang.Override
3623    public com.google.protobuf.Parser<SlowPingRequestProto> getParserForType() {
3624      return PARSER;
3625    }
3626
3627    private int bitField0_;
3628    // required bool shouldSlow = 1;
3629    public static final int SHOULDSLOW_FIELD_NUMBER = 1;
3630    private boolean shouldSlow_;
3631    /**
3632     * <code>required bool shouldSlow = 1;</code>
3633     */
3634    public boolean hasShouldSlow() {
3635      return ((bitField0_ & 0x00000001) == 0x00000001);
3636    }
3637    /**
3638     * <code>required bool shouldSlow = 1;</code>
3639     */
3640    public boolean getShouldSlow() {
3641      return shouldSlow_;
3642    }
3643
3644    private void initFields() {
3645      shouldSlow_ = false;
3646    }
3647    private byte memoizedIsInitialized = -1;
3648    public final boolean isInitialized() {
3649      byte isInitialized = memoizedIsInitialized;
3650      if (isInitialized != -1) return isInitialized == 1;
3651
3652      if (!hasShouldSlow()) {
3653        memoizedIsInitialized = 0;
3654        return false;
3655      }
3656      memoizedIsInitialized = 1;
3657      return true;
3658    }
3659
3660    public void writeTo(com.google.protobuf.CodedOutputStream output)
3661                        throws java.io.IOException {
3662      getSerializedSize();
3663      if (((bitField0_ & 0x00000001) == 0x00000001)) {
3664        output.writeBool(1, shouldSlow_);
3665      }
3666      getUnknownFields().writeTo(output);
3667    }
3668
3669    private int memoizedSerializedSize = -1;
3670    public int getSerializedSize() {
3671      int size = memoizedSerializedSize;
3672      if (size != -1) return size;
3673
3674      size = 0;
3675      if (((bitField0_ & 0x00000001) == 0x00000001)) {
3676        size += com.google.protobuf.CodedOutputStream
3677          .computeBoolSize(1, shouldSlow_);
3678      }
3679      size += getUnknownFields().getSerializedSize();
3680      memoizedSerializedSize = size;
3681      return size;
3682    }
3683
3684    private static final long serialVersionUID = 0L;
3685    @java.lang.Override
3686    protected java.lang.Object writeReplace()
3687        throws java.io.ObjectStreamException {
3688      return super.writeReplace();
3689    }
3690
3691    @java.lang.Override
3692    public boolean equals(final java.lang.Object obj) {
3693      if (obj == this) {
3694       return true;
3695      }
3696      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)) {
3697        return super.equals(obj);
3698      }
3699      org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto) obj;
3700
3701      boolean result = true;
3702      result = result && (hasShouldSlow() == other.hasShouldSlow());
3703      if (hasShouldSlow()) {
3704        result = result && (getShouldSlow()
3705            == other.getShouldSlow());
3706      }
3707      result = result &&
3708          getUnknownFields().equals(other.getUnknownFields());
3709      return result;
3710    }
3711
3712    private int memoizedHashCode = 0;
3713    @java.lang.Override
3714    public int hashCode() {
3715      if (memoizedHashCode != 0) {
3716        return memoizedHashCode;
3717      }
3718      int hash = 41;
3719      hash = (19 * hash) + getDescriptorForType().hashCode();
3720      if (hasShouldSlow()) {
3721        hash = (37 * hash) + SHOULDSLOW_FIELD_NUMBER;
3722        hash = (53 * hash) + hashBoolean(getShouldSlow());
3723      }
3724      hash = (29 * hash) + getUnknownFields().hashCode();
3725      memoizedHashCode = hash;
3726      return hash;
3727    }
3728
3729    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
3730        com.google.protobuf.ByteString data)
3731        throws com.google.protobuf.InvalidProtocolBufferException {
3732      return PARSER.parseFrom(data);
3733    }
3734    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
3735        com.google.protobuf.ByteString data,
3736        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3737        throws com.google.protobuf.InvalidProtocolBufferException {
3738      return PARSER.parseFrom(data, extensionRegistry);
3739    }
3740    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(byte[] data)
3741        throws com.google.protobuf.InvalidProtocolBufferException {
3742      return PARSER.parseFrom(data);
3743    }
3744    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
3745        byte[] data,
3746        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3747        throws com.google.protobuf.InvalidProtocolBufferException {
3748      return PARSER.parseFrom(data, extensionRegistry);
3749    }
3750    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(java.io.InputStream input)
3751        throws java.io.IOException {
3752      return PARSER.parseFrom(input);
3753    }
3754    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
3755        java.io.InputStream input,
3756        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3757        throws java.io.IOException {
3758      return PARSER.parseFrom(input, extensionRegistry);
3759    }
3760    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseDelimitedFrom(java.io.InputStream input)
3761        throws java.io.IOException {
3762      return PARSER.parseDelimitedFrom(input);
3763    }
3764    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseDelimitedFrom(
3765        java.io.InputStream input,
3766        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3767        throws java.io.IOException {
3768      return PARSER.parseDelimitedFrom(input, extensionRegistry);
3769    }
3770    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
3771        com.google.protobuf.CodedInputStream input)
3772        throws java.io.IOException {
3773      return PARSER.parseFrom(input);
3774    }
3775    public static org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parseFrom(
3776        com.google.protobuf.CodedInputStream input,
3777        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3778        throws java.io.IOException {
3779      return PARSER.parseFrom(input, extensionRegistry);
3780    }
3781
3782    public static Builder newBuilder() { return Builder.create(); }
3783    public Builder newBuilderForType() { return newBuilder(); }
3784    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto prototype) {
3785      return newBuilder().mergeFrom(prototype);
3786    }
3787    public Builder toBuilder() { return newBuilder(this); }
3788
3789    @java.lang.Override
3790    protected Builder newBuilderForType(
3791        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3792      Builder builder = new Builder(parent);
3793      return builder;
3794    }
3795    /**
3796     * Protobuf type {@code hadoop.common.SlowPingRequestProto}
3797     */
3798    public static final class Builder extends
3799        com.google.protobuf.GeneratedMessage.Builder<Builder>
3800       implements org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProtoOrBuilder {
3801      public static final com.google.protobuf.Descriptors.Descriptor
3802          getDescriptor() {
3803        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
3804      }
3805
3806      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3807          internalGetFieldAccessorTable() {
3808        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable
3809            .ensureFieldAccessorsInitialized(
3810                org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.Builder.class);
3811      }
3812
3813      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.newBuilder()
3814      private Builder() {
3815        maybeForceBuilderInitialization();
3816      }
3817
3818      private Builder(
3819          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3820        super(parent);
3821        maybeForceBuilderInitialization();
3822      }
3823      private void maybeForceBuilderInitialization() {
3824        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3825        }
3826      }
3827      private static Builder create() {
3828        return new Builder();
3829      }
3830
3831      public Builder clear() {
3832        super.clear();
3833        shouldSlow_ = false;
3834        bitField0_ = (bitField0_ & ~0x00000001);
3835        return this;
3836      }
3837
3838      public Builder clone() {
3839        return create().mergeFrom(buildPartial());
3840      }
3841
3842      public com.google.protobuf.Descriptors.Descriptor
3843          getDescriptorForType() {
3844        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_SlowPingRequestProto_descriptor;
3845      }
3846
3847      public org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto getDefaultInstanceForType() {
3848        return org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance();
3849      }
3850
3851      public org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto build() {
3852        org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto result = buildPartial();
3853        if (!result.isInitialized()) {
3854          throw newUninitializedMessageException(result);
3855        }
3856        return result;
3857      }
3858
3859      public org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto buildPartial() {
3860        org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto(this);
3861        int from_bitField0_ = bitField0_;
3862        int to_bitField0_ = 0;
3863        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3864          to_bitField0_ |= 0x00000001;
3865        }
3866        result.shouldSlow_ = shouldSlow_;
3867        result.bitField0_ = to_bitField0_;
3868        onBuilt();
3869        return result;
3870      }
3871
3872      public Builder mergeFrom(com.google.protobuf.Message other) {
3873        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto) {
3874          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)other);
3875        } else {
3876          super.mergeFrom(other);
3877          return this;
3878        }
3879      }
3880
3881      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto other) {
3882        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance()) return this;
3883        if (other.hasShouldSlow()) {
3884          setShouldSlow(other.getShouldSlow());
3885        }
3886        this.mergeUnknownFields(other.getUnknownFields());
3887        return this;
3888      }
3889
3890      public final boolean isInitialized() {
3891        if (!hasShouldSlow()) {
3892          
3893          return false;
3894        }
3895        return true;
3896      }
3897
3898      public Builder mergeFrom(
3899          com.google.protobuf.CodedInputStream input,
3900          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3901          throws java.io.IOException {
3902        org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto parsedMessage = null;
3903        try {
3904          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3905        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3906          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto) e.getUnfinishedMessage();
3907          throw e;
3908        } finally {
3909          if (parsedMessage != null) {
3910            mergeFrom(parsedMessage);
3911          }
3912        }
3913        return this;
3914      }
3915      private int bitField0_;
3916
3917      // required bool shouldSlow = 1;
3918      private boolean shouldSlow_ ;
3919      /**
3920       * <code>required bool shouldSlow = 1;</code>
3921       */
3922      public boolean hasShouldSlow() {
3923        return ((bitField0_ & 0x00000001) == 0x00000001);
3924      }
3925      /**
3926       * <code>required bool shouldSlow = 1;</code>
3927       */
3928      public boolean getShouldSlow() {
3929        return shouldSlow_;
3930      }
3931      /**
3932       * <code>required bool shouldSlow = 1;</code>
3933       */
3934      public Builder setShouldSlow(boolean value) {
3935        bitField0_ |= 0x00000001;
3936        shouldSlow_ = value;
3937        onChanged();
3938        return this;
3939      }
3940      /**
3941       * <code>required bool shouldSlow = 1;</code>
3942       */
3943      public Builder clearShouldSlow() {
3944        bitField0_ = (bitField0_ & ~0x00000001);
3945        shouldSlow_ = false;
3946        onChanged();
3947        return this;
3948      }
3949
3950      // @@protoc_insertion_point(builder_scope:hadoop.common.SlowPingRequestProto)
3951    }
3952
3953    static {
3954      defaultInstance = new SlowPingRequestProto(true);
3955      defaultInstance.initFields();
3956    }
3957
3958    // @@protoc_insertion_point(class_scope:hadoop.common.SlowPingRequestProto)
3959  }
3960
3961  public interface EchoRequestProto2OrBuilder
3962      extends com.google.protobuf.MessageOrBuilder {
3963
3964    // repeated string message = 1;
3965    /**
3966     * <code>repeated string message = 1;</code>
3967     */
3968    java.util.List<java.lang.String>
3969    getMessageList();
3970    /**
3971     * <code>repeated string message = 1;</code>
3972     */
3973    int getMessageCount();
3974    /**
3975     * <code>repeated string message = 1;</code>
3976     */
3977    java.lang.String getMessage(int index);
3978    /**
3979     * <code>repeated string message = 1;</code>
3980     */
3981    com.google.protobuf.ByteString
3982        getMessageBytes(int index);
3983  }
3984  /**
3985   * Protobuf type {@code hadoop.common.EchoRequestProto2}
3986   */
3987  public static final class EchoRequestProto2 extends
3988      com.google.protobuf.GeneratedMessage
3989      implements EchoRequestProto2OrBuilder {
3990    // Use EchoRequestProto2.newBuilder() to construct.
3991    private EchoRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3992      super(builder);
3993      this.unknownFields = builder.getUnknownFields();
3994    }
3995    private EchoRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3996
3997    private static final EchoRequestProto2 defaultInstance;
3998    public static EchoRequestProto2 getDefaultInstance() {
3999      return defaultInstance;
4000    }
4001
4002    public EchoRequestProto2 getDefaultInstanceForType() {
4003      return defaultInstance;
4004    }
4005
4006    private final com.google.protobuf.UnknownFieldSet unknownFields;
4007    @java.lang.Override
4008    public final com.google.protobuf.UnknownFieldSet
4009        getUnknownFields() {
4010      return this.unknownFields;
4011    }
4012    private EchoRequestProto2(
4013        com.google.protobuf.CodedInputStream input,
4014        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4015        throws com.google.protobuf.InvalidProtocolBufferException {
4016      initFields();
4017      int mutable_bitField0_ = 0;
4018      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4019          com.google.protobuf.UnknownFieldSet.newBuilder();
4020      try {
4021        boolean done = false;
4022        while (!done) {
4023          int tag = input.readTag();
4024          switch (tag) {
4025            case 0:
4026              done = true;
4027              break;
4028            default: {
4029              if (!parseUnknownField(input, unknownFields,
4030                                     extensionRegistry, tag)) {
4031                done = true;
4032              }
4033              break;
4034            }
4035            case 10: {
4036              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4037                message_ = new com.google.protobuf.LazyStringArrayList();
4038                mutable_bitField0_ |= 0x00000001;
4039              }
4040              message_.add(input.readBytes());
4041              break;
4042            }
4043          }
4044        }
4045      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4046        throw e.setUnfinishedMessage(this);
4047      } catch (java.io.IOException e) {
4048        throw new com.google.protobuf.InvalidProtocolBufferException(
4049            e.getMessage()).setUnfinishedMessage(this);
4050      } finally {
4051        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4052          message_ = new com.google.protobuf.UnmodifiableLazyStringList(message_);
4053        }
4054        this.unknownFields = unknownFields.build();
4055        makeExtensionsImmutable();
4056      }
4057    }
4058    public static final com.google.protobuf.Descriptors.Descriptor
4059        getDescriptor() {
4060      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_descriptor;
4061    }
4062
4063    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4064        internalGetFieldAccessorTable() {
4065      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
4066          .ensureFieldAccessorsInitialized(
4067              org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.Builder.class);
4068    }
4069
4070    public static com.google.protobuf.Parser<EchoRequestProto2> PARSER =
4071        new com.google.protobuf.AbstractParser<EchoRequestProto2>() {
4072      public EchoRequestProto2 parsePartialFrom(
4073          com.google.protobuf.CodedInputStream input,
4074          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4075          throws com.google.protobuf.InvalidProtocolBufferException {
4076        return new EchoRequestProto2(input, extensionRegistry);
4077      }
4078    };
4079
4080    @java.lang.Override
4081    public com.google.protobuf.Parser<EchoRequestProto2> getParserForType() {
4082      return PARSER;
4083    }
4084
4085    // repeated string message = 1;
4086    public static final int MESSAGE_FIELD_NUMBER = 1;
4087    private com.google.protobuf.LazyStringList message_;
4088    /**
4089     * <code>repeated string message = 1;</code>
4090     */
4091    public java.util.List<java.lang.String>
4092        getMessageList() {
4093      return message_;
4094    }
4095    /**
4096     * <code>repeated string message = 1;</code>
4097     */
4098    public int getMessageCount() {
4099      return message_.size();
4100    }
4101    /**
4102     * <code>repeated string message = 1;</code>
4103     */
4104    public java.lang.String getMessage(int index) {
4105      return message_.get(index);
4106    }
4107    /**
4108     * <code>repeated string message = 1;</code>
4109     */
4110    public com.google.protobuf.ByteString
4111        getMessageBytes(int index) {
4112      return message_.getByteString(index);
4113    }
4114
4115    private void initFields() {
4116      message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4117    }
4118    private byte memoizedIsInitialized = -1;
4119    public final boolean isInitialized() {
4120      byte isInitialized = memoizedIsInitialized;
4121      if (isInitialized != -1) return isInitialized == 1;
4122
4123      memoizedIsInitialized = 1;
4124      return true;
4125    }
4126
4127    public void writeTo(com.google.protobuf.CodedOutputStream output)
4128                        throws java.io.IOException {
4129      getSerializedSize();
4130      for (int i = 0; i < message_.size(); i++) {
4131        output.writeBytes(1, message_.getByteString(i));
4132      }
4133      getUnknownFields().writeTo(output);
4134    }
4135
4136    private int memoizedSerializedSize = -1;
4137    public int getSerializedSize() {
4138      int size = memoizedSerializedSize;
4139      if (size != -1) return size;
4140
4141      size = 0;
4142      {
4143        int dataSize = 0;
4144        for (int i = 0; i < message_.size(); i++) {
4145          dataSize += com.google.protobuf.CodedOutputStream
4146            .computeBytesSizeNoTag(message_.getByteString(i));
4147        }
4148        size += dataSize;
4149        size += 1 * getMessageList().size();
4150      }
4151      size += getUnknownFields().getSerializedSize();
4152      memoizedSerializedSize = size;
4153      return size;
4154    }
4155
4156    private static final long serialVersionUID = 0L;
4157    @java.lang.Override
4158    protected java.lang.Object writeReplace()
4159        throws java.io.ObjectStreamException {
4160      return super.writeReplace();
4161    }
4162
4163    @java.lang.Override
4164    public boolean equals(final java.lang.Object obj) {
4165      if (obj == this) {
4166       return true;
4167      }
4168      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)) {
4169        return super.equals(obj);
4170      }
4171      org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2) obj;
4172
4173      boolean result = true;
4174      result = result && getMessageList()
4175          .equals(other.getMessageList());
4176      result = result &&
4177          getUnknownFields().equals(other.getUnknownFields());
4178      return result;
4179    }
4180
4181    private int memoizedHashCode = 0;
4182    @java.lang.Override
4183    public int hashCode() {
4184      if (memoizedHashCode != 0) {
4185        return memoizedHashCode;
4186      }
4187      int hash = 41;
4188      hash = (19 * hash) + getDescriptorForType().hashCode();
4189      if (getMessageCount() > 0) {
4190        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
4191        hash = (53 * hash) + getMessageList().hashCode();
4192      }
4193      hash = (29 * hash) + getUnknownFields().hashCode();
4194      memoizedHashCode = hash;
4195      return hash;
4196    }
4197
4198    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
4199        com.google.protobuf.ByteString data)
4200        throws com.google.protobuf.InvalidProtocolBufferException {
4201      return PARSER.parseFrom(data);
4202    }
4203    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
4204        com.google.protobuf.ByteString data,
4205        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4206        throws com.google.protobuf.InvalidProtocolBufferException {
4207      return PARSER.parseFrom(data, extensionRegistry);
4208    }
4209    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(byte[] data)
4210        throws com.google.protobuf.InvalidProtocolBufferException {
4211      return PARSER.parseFrom(data);
4212    }
4213    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
4214        byte[] data,
4215        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4216        throws com.google.protobuf.InvalidProtocolBufferException {
4217      return PARSER.parseFrom(data, extensionRegistry);
4218    }
4219    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(java.io.InputStream input)
4220        throws java.io.IOException {
4221      return PARSER.parseFrom(input);
4222    }
4223    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
4224        java.io.InputStream input,
4225        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4226        throws java.io.IOException {
4227      return PARSER.parseFrom(input, extensionRegistry);
4228    }
4229    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseDelimitedFrom(java.io.InputStream input)
4230        throws java.io.IOException {
4231      return PARSER.parseDelimitedFrom(input);
4232    }
4233    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseDelimitedFrom(
4234        java.io.InputStream input,
4235        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4236        throws java.io.IOException {
4237      return PARSER.parseDelimitedFrom(input, extensionRegistry);
4238    }
4239    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
4240        com.google.protobuf.CodedInputStream input)
4241        throws java.io.IOException {
4242      return PARSER.parseFrom(input);
4243    }
4244    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parseFrom(
4245        com.google.protobuf.CodedInputStream input,
4246        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4247        throws java.io.IOException {
4248      return PARSER.parseFrom(input, extensionRegistry);
4249    }
4250
4251    public static Builder newBuilder() { return Builder.create(); }
4252    public Builder newBuilderForType() { return newBuilder(); }
4253    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 prototype) {
4254      return newBuilder().mergeFrom(prototype);
4255    }
4256    public Builder toBuilder() { return newBuilder(this); }
4257
4258    @java.lang.Override
4259    protected Builder newBuilderForType(
4260        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4261      Builder builder = new Builder(parent);
4262      return builder;
4263    }
4264    /**
4265     * Protobuf type {@code hadoop.common.EchoRequestProto2}
4266     */
4267    public static final class Builder extends
4268        com.google.protobuf.GeneratedMessage.Builder<Builder>
4269       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2OrBuilder {
4270      public static final com.google.protobuf.Descriptors.Descriptor
4271          getDescriptor() {
4272        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_descriptor;
4273      }
4274
4275      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4276          internalGetFieldAccessorTable() {
4277        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable
4278            .ensureFieldAccessorsInitialized(
4279                org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.Builder.class);
4280      }
4281
4282      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.newBuilder()
4283      private Builder() {
4284        maybeForceBuilderInitialization();
4285      }
4286
4287      private Builder(
4288          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4289        super(parent);
4290        maybeForceBuilderInitialization();
4291      }
4292      private void maybeForceBuilderInitialization() {
4293        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4294        }
4295      }
4296      private static Builder create() {
4297        return new Builder();
4298      }
4299
4300      public Builder clear() {
4301        super.clear();
4302        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4303        bitField0_ = (bitField0_ & ~0x00000001);
4304        return this;
4305      }
4306
4307      public Builder clone() {
4308        return create().mergeFrom(buildPartial());
4309      }
4310
4311      public com.google.protobuf.Descriptors.Descriptor
4312          getDescriptorForType() {
4313        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto2_descriptor;
4314      }
4315
4316      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 getDefaultInstanceForType() {
4317        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance();
4318      }
4319
4320      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 build() {
4321        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 result = buildPartial();
4322        if (!result.isInitialized()) {
4323          throw newUninitializedMessageException(result);
4324        }
4325        return result;
4326      }
4327
4328      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 buildPartial() {
4329        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2(this);
4330        int from_bitField0_ = bitField0_;
4331        if (((bitField0_ & 0x00000001) == 0x00000001)) {
4332          message_ = new com.google.protobuf.UnmodifiableLazyStringList(
4333              message_);
4334          bitField0_ = (bitField0_ & ~0x00000001);
4335        }
4336        result.message_ = message_;
4337        onBuilt();
4338        return result;
4339      }
4340
4341      public Builder mergeFrom(com.google.protobuf.Message other) {
4342        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2) {
4343          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)other);
4344        } else {
4345          super.mergeFrom(other);
4346          return this;
4347        }
4348      }
4349
4350      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 other) {
4351        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance()) return this;
4352        if (!other.message_.isEmpty()) {
4353          if (message_.isEmpty()) {
4354            message_ = other.message_;
4355            bitField0_ = (bitField0_ & ~0x00000001);
4356          } else {
4357            ensureMessageIsMutable();
4358            message_.addAll(other.message_);
4359          }
4360          onChanged();
4361        }
4362        this.mergeUnknownFields(other.getUnknownFields());
4363        return this;
4364      }
4365
4366      public final boolean isInitialized() {
4367        return true;
4368      }
4369
4370      public Builder mergeFrom(
4371          com.google.protobuf.CodedInputStream input,
4372          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4373          throws java.io.IOException {
4374        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 parsedMessage = null;
4375        try {
4376          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4377        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4378          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2) e.getUnfinishedMessage();
4379          throw e;
4380        } finally {
4381          if (parsedMessage != null) {
4382            mergeFrom(parsedMessage);
4383          }
4384        }
4385        return this;
4386      }
4387      private int bitField0_;
4388
4389      // repeated string message = 1;
4390      private com.google.protobuf.LazyStringList message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4391      private void ensureMessageIsMutable() {
4392        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
4393          message_ = new com.google.protobuf.LazyStringArrayList(message_);
4394          bitField0_ |= 0x00000001;
4395         }
4396      }
4397      /**
4398       * <code>repeated string message = 1;</code>
4399       */
4400      public java.util.List<java.lang.String>
4401          getMessageList() {
4402        return java.util.Collections.unmodifiableList(message_);
4403      }
4404      /**
4405       * <code>repeated string message = 1;</code>
4406       */
4407      public int getMessageCount() {
4408        return message_.size();
4409      }
4410      /**
4411       * <code>repeated string message = 1;</code>
4412       */
4413      public java.lang.String getMessage(int index) {
4414        return message_.get(index);
4415      }
4416      /**
4417       * <code>repeated string message = 1;</code>
4418       */
4419      public com.google.protobuf.ByteString
4420          getMessageBytes(int index) {
4421        return message_.getByteString(index);
4422      }
4423      /**
4424       * <code>repeated string message = 1;</code>
4425       */
4426      public Builder setMessage(
4427          int index, java.lang.String value) {
4428        if (value == null) {
4429    throw new NullPointerException();
4430  }
4431  ensureMessageIsMutable();
4432        message_.set(index, value);
4433        onChanged();
4434        return this;
4435      }
4436      /**
4437       * <code>repeated string message = 1;</code>
4438       */
4439      public Builder addMessage(
4440          java.lang.String value) {
4441        if (value == null) {
4442    throw new NullPointerException();
4443  }
4444  ensureMessageIsMutable();
4445        message_.add(value);
4446        onChanged();
4447        return this;
4448      }
4449      /**
4450       * <code>repeated string message = 1;</code>
4451       */
4452      public Builder addAllMessage(
4453          java.lang.Iterable<java.lang.String> values) {
4454        ensureMessageIsMutable();
4455        super.addAll(values, message_);
4456        onChanged();
4457        return this;
4458      }
4459      /**
4460       * <code>repeated string message = 1;</code>
4461       */
4462      public Builder clearMessage() {
4463        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4464        bitField0_ = (bitField0_ & ~0x00000001);
4465        onChanged();
4466        return this;
4467      }
4468      /**
4469       * <code>repeated string message = 1;</code>
4470       */
4471      public Builder addMessageBytes(
4472          com.google.protobuf.ByteString value) {
4473        if (value == null) {
4474    throw new NullPointerException();
4475  }
4476  ensureMessageIsMutable();
4477        message_.add(value);
4478        onChanged();
4479        return this;
4480      }
4481
4482      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto2)
4483    }
4484
4485    static {
4486      defaultInstance = new EchoRequestProto2(true);
4487      defaultInstance.initFields();
4488    }
4489
4490    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto2)
4491  }
4492
4493  public interface EchoResponseProto2OrBuilder
4494      extends com.google.protobuf.MessageOrBuilder {
4495
4496    // repeated string message = 1;
4497    /**
4498     * <code>repeated string message = 1;</code>
4499     */
4500    java.util.List<java.lang.String>
4501    getMessageList();
4502    /**
4503     * <code>repeated string message = 1;</code>
4504     */
4505    int getMessageCount();
4506    /**
4507     * <code>repeated string message = 1;</code>
4508     */
4509    java.lang.String getMessage(int index);
4510    /**
4511     * <code>repeated string message = 1;</code>
4512     */
4513    com.google.protobuf.ByteString
4514        getMessageBytes(int index);
4515  }
4516  /**
4517   * Protobuf type {@code hadoop.common.EchoResponseProto2}
4518   */
4519  public static final class EchoResponseProto2 extends
4520      com.google.protobuf.GeneratedMessage
4521      implements EchoResponseProto2OrBuilder {
4522    // Use EchoResponseProto2.newBuilder() to construct.
4523    private EchoResponseProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4524      super(builder);
4525      this.unknownFields = builder.getUnknownFields();
4526    }
4527    private EchoResponseProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4528
4529    private static final EchoResponseProto2 defaultInstance;
4530    public static EchoResponseProto2 getDefaultInstance() {
4531      return defaultInstance;
4532    }
4533
4534    public EchoResponseProto2 getDefaultInstanceForType() {
4535      return defaultInstance;
4536    }
4537
4538    private final com.google.protobuf.UnknownFieldSet unknownFields;
4539    @java.lang.Override
4540    public final com.google.protobuf.UnknownFieldSet
4541        getUnknownFields() {
4542      return this.unknownFields;
4543    }
4544    private EchoResponseProto2(
4545        com.google.protobuf.CodedInputStream input,
4546        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4547        throws com.google.protobuf.InvalidProtocolBufferException {
4548      initFields();
4549      int mutable_bitField0_ = 0;
4550      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4551          com.google.protobuf.UnknownFieldSet.newBuilder();
4552      try {
4553        boolean done = false;
4554        while (!done) {
4555          int tag = input.readTag();
4556          switch (tag) {
4557            case 0:
4558              done = true;
4559              break;
4560            default: {
4561              if (!parseUnknownField(input, unknownFields,
4562                                     extensionRegistry, tag)) {
4563                done = true;
4564              }
4565              break;
4566            }
4567            case 10: {
4568              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4569                message_ = new com.google.protobuf.LazyStringArrayList();
4570                mutable_bitField0_ |= 0x00000001;
4571              }
4572              message_.add(input.readBytes());
4573              break;
4574            }
4575          }
4576        }
4577      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4578        throw e.setUnfinishedMessage(this);
4579      } catch (java.io.IOException e) {
4580        throw new com.google.protobuf.InvalidProtocolBufferException(
4581            e.getMessage()).setUnfinishedMessage(this);
4582      } finally {
4583        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4584          message_ = new com.google.protobuf.UnmodifiableLazyStringList(message_);
4585        }
4586        this.unknownFields = unknownFields.build();
4587        makeExtensionsImmutable();
4588      }
4589    }
4590    public static final com.google.protobuf.Descriptors.Descriptor
4591        getDescriptor() {
4592      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_descriptor;
4593    }
4594
4595    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4596        internalGetFieldAccessorTable() {
4597      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
4598          .ensureFieldAccessorsInitialized(
4599              org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.Builder.class);
4600    }
4601
4602    public static com.google.protobuf.Parser<EchoResponseProto2> PARSER =
4603        new com.google.protobuf.AbstractParser<EchoResponseProto2>() {
4604      public EchoResponseProto2 parsePartialFrom(
4605          com.google.protobuf.CodedInputStream input,
4606          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4607          throws com.google.protobuf.InvalidProtocolBufferException {
4608        return new EchoResponseProto2(input, extensionRegistry);
4609      }
4610    };
4611
4612    @java.lang.Override
4613    public com.google.protobuf.Parser<EchoResponseProto2> getParserForType() {
4614      return PARSER;
4615    }
4616
4617    // repeated string message = 1;
4618    public static final int MESSAGE_FIELD_NUMBER = 1;
4619    private com.google.protobuf.LazyStringList message_;
4620    /**
4621     * <code>repeated string message = 1;</code>
4622     */
4623    public java.util.List<java.lang.String>
4624        getMessageList() {
4625      return message_;
4626    }
4627    /**
4628     * <code>repeated string message = 1;</code>
4629     */
4630    public int getMessageCount() {
4631      return message_.size();
4632    }
4633    /**
4634     * <code>repeated string message = 1;</code>
4635     */
4636    public java.lang.String getMessage(int index) {
4637      return message_.get(index);
4638    }
4639    /**
4640     * <code>repeated string message = 1;</code>
4641     */
4642    public com.google.protobuf.ByteString
4643        getMessageBytes(int index) {
4644      return message_.getByteString(index);
4645    }
4646
4647    private void initFields() {
4648      message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4649    }
4650    private byte memoizedIsInitialized = -1;
4651    public final boolean isInitialized() {
4652      byte isInitialized = memoizedIsInitialized;
4653      if (isInitialized != -1) return isInitialized == 1;
4654
4655      memoizedIsInitialized = 1;
4656      return true;
4657    }
4658
4659    public void writeTo(com.google.protobuf.CodedOutputStream output)
4660                        throws java.io.IOException {
4661      getSerializedSize();
4662      for (int i = 0; i < message_.size(); i++) {
4663        output.writeBytes(1, message_.getByteString(i));
4664      }
4665      getUnknownFields().writeTo(output);
4666    }
4667
4668    private int memoizedSerializedSize = -1;
4669    public int getSerializedSize() {
4670      int size = memoizedSerializedSize;
4671      if (size != -1) return size;
4672
4673      size = 0;
4674      {
4675        int dataSize = 0;
4676        for (int i = 0; i < message_.size(); i++) {
4677          dataSize += com.google.protobuf.CodedOutputStream
4678            .computeBytesSizeNoTag(message_.getByteString(i));
4679        }
4680        size += dataSize;
4681        size += 1 * getMessageList().size();
4682      }
4683      size += getUnknownFields().getSerializedSize();
4684      memoizedSerializedSize = size;
4685      return size;
4686    }
4687
4688    private static final long serialVersionUID = 0L;
4689    @java.lang.Override
4690    protected java.lang.Object writeReplace()
4691        throws java.io.ObjectStreamException {
4692      return super.writeReplace();
4693    }
4694
4695    @java.lang.Override
4696    public boolean equals(final java.lang.Object obj) {
4697      if (obj == this) {
4698       return true;
4699      }
4700      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2)) {
4701        return super.equals(obj);
4702      }
4703      org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) obj;
4704
4705      boolean result = true;
4706      result = result && getMessageList()
4707          .equals(other.getMessageList());
4708      result = result &&
4709          getUnknownFields().equals(other.getUnknownFields());
4710      return result;
4711    }
4712
4713    private int memoizedHashCode = 0;
4714    @java.lang.Override
4715    public int hashCode() {
4716      if (memoizedHashCode != 0) {
4717        return memoizedHashCode;
4718      }
4719      int hash = 41;
4720      hash = (19 * hash) + getDescriptorForType().hashCode();
4721      if (getMessageCount() > 0) {
4722        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
4723        hash = (53 * hash) + getMessageList().hashCode();
4724      }
4725      hash = (29 * hash) + getUnknownFields().hashCode();
4726      memoizedHashCode = hash;
4727      return hash;
4728    }
4729
4730    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
4731        com.google.protobuf.ByteString data)
4732        throws com.google.protobuf.InvalidProtocolBufferException {
4733      return PARSER.parseFrom(data);
4734    }
4735    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
4736        com.google.protobuf.ByteString data,
4737        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4738        throws com.google.protobuf.InvalidProtocolBufferException {
4739      return PARSER.parseFrom(data, extensionRegistry);
4740    }
4741    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(byte[] data)
4742        throws com.google.protobuf.InvalidProtocolBufferException {
4743      return PARSER.parseFrom(data);
4744    }
4745    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
4746        byte[] data,
4747        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4748        throws com.google.protobuf.InvalidProtocolBufferException {
4749      return PARSER.parseFrom(data, extensionRegistry);
4750    }
4751    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(java.io.InputStream input)
4752        throws java.io.IOException {
4753      return PARSER.parseFrom(input);
4754    }
4755    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
4756        java.io.InputStream input,
4757        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4758        throws java.io.IOException {
4759      return PARSER.parseFrom(input, extensionRegistry);
4760    }
4761    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseDelimitedFrom(java.io.InputStream input)
4762        throws java.io.IOException {
4763      return PARSER.parseDelimitedFrom(input);
4764    }
4765    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseDelimitedFrom(
4766        java.io.InputStream input,
4767        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4768        throws java.io.IOException {
4769      return PARSER.parseDelimitedFrom(input, extensionRegistry);
4770    }
4771    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
4772        com.google.protobuf.CodedInputStream input)
4773        throws java.io.IOException {
4774      return PARSER.parseFrom(input);
4775    }
4776    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parseFrom(
4777        com.google.protobuf.CodedInputStream input,
4778        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4779        throws java.io.IOException {
4780      return PARSER.parseFrom(input, extensionRegistry);
4781    }
4782
4783    public static Builder newBuilder() { return Builder.create(); }
4784    public Builder newBuilderForType() { return newBuilder(); }
4785    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 prototype) {
4786      return newBuilder().mergeFrom(prototype);
4787    }
4788    public Builder toBuilder() { return newBuilder(this); }
4789
4790    @java.lang.Override
4791    protected Builder newBuilderForType(
4792        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4793      Builder builder = new Builder(parent);
4794      return builder;
4795    }
4796    /**
4797     * Protobuf type {@code hadoop.common.EchoResponseProto2}
4798     */
4799    public static final class Builder extends
4800        com.google.protobuf.GeneratedMessage.Builder<Builder>
4801       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2OrBuilder {
4802      public static final com.google.protobuf.Descriptors.Descriptor
4803          getDescriptor() {
4804        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_descriptor;
4805      }
4806
4807      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4808          internalGetFieldAccessorTable() {
4809        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable
4810            .ensureFieldAccessorsInitialized(
4811                org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.Builder.class);
4812      }
4813
4814      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.newBuilder()
4815      private Builder() {
4816        maybeForceBuilderInitialization();
4817      }
4818
4819      private Builder(
4820          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4821        super(parent);
4822        maybeForceBuilderInitialization();
4823      }
4824      private void maybeForceBuilderInitialization() {
4825        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4826        }
4827      }
4828      private static Builder create() {
4829        return new Builder();
4830      }
4831
4832      public Builder clear() {
4833        super.clear();
4834        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4835        bitField0_ = (bitField0_ & ~0x00000001);
4836        return this;
4837      }
4838
4839      public Builder clone() {
4840        return create().mergeFrom(buildPartial());
4841      }
4842
4843      public com.google.protobuf.Descriptors.Descriptor
4844          getDescriptorForType() {
4845        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto2_descriptor;
4846      }
4847
4848      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 getDefaultInstanceForType() {
4849        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance();
4850      }
4851
4852      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 build() {
4853        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 result = buildPartial();
4854        if (!result.isInitialized()) {
4855          throw newUninitializedMessageException(result);
4856        }
4857        return result;
4858      }
4859
4860      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 buildPartial() {
4861        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2(this);
4862        int from_bitField0_ = bitField0_;
4863        if (((bitField0_ & 0x00000001) == 0x00000001)) {
4864          message_ = new com.google.protobuf.UnmodifiableLazyStringList(
4865              message_);
4866          bitField0_ = (bitField0_ & ~0x00000001);
4867        }
4868        result.message_ = message_;
4869        onBuilt();
4870        return result;
4871      }
4872
4873      public Builder mergeFrom(com.google.protobuf.Message other) {
4874        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) {
4875          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2)other);
4876        } else {
4877          super.mergeFrom(other);
4878          return this;
4879        }
4880      }
4881
4882      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 other) {
4883        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance()) return this;
4884        if (!other.message_.isEmpty()) {
4885          if (message_.isEmpty()) {
4886            message_ = other.message_;
4887            bitField0_ = (bitField0_ & ~0x00000001);
4888          } else {
4889            ensureMessageIsMutable();
4890            message_.addAll(other.message_);
4891          }
4892          onChanged();
4893        }
4894        this.mergeUnknownFields(other.getUnknownFields());
4895        return this;
4896      }
4897
4898      public final boolean isInitialized() {
4899        return true;
4900      }
4901
4902      public Builder mergeFrom(
4903          com.google.protobuf.CodedInputStream input,
4904          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4905          throws java.io.IOException {
4906        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 parsedMessage = null;
4907        try {
4908          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4909        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4910          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) e.getUnfinishedMessage();
4911          throw e;
4912        } finally {
4913          if (parsedMessage != null) {
4914            mergeFrom(parsedMessage);
4915          }
4916        }
4917        return this;
4918      }
4919      private int bitField0_;
4920
4921      // repeated string message = 1;
4922      private com.google.protobuf.LazyStringList message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4923      private void ensureMessageIsMutable() {
4924        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
4925          message_ = new com.google.protobuf.LazyStringArrayList(message_);
4926          bitField0_ |= 0x00000001;
4927         }
4928      }
4929      /**
4930       * <code>repeated string message = 1;</code>
4931       */
4932      public java.util.List<java.lang.String>
4933          getMessageList() {
4934        return java.util.Collections.unmodifiableList(message_);
4935      }
4936      /**
4937       * <code>repeated string message = 1;</code>
4938       */
4939      public int getMessageCount() {
4940        return message_.size();
4941      }
4942      /**
4943       * <code>repeated string message = 1;</code>
4944       */
4945      public java.lang.String getMessage(int index) {
4946        return message_.get(index);
4947      }
4948      /**
4949       * <code>repeated string message = 1;</code>
4950       */
4951      public com.google.protobuf.ByteString
4952          getMessageBytes(int index) {
4953        return message_.getByteString(index);
4954      }
4955      /**
4956       * <code>repeated string message = 1;</code>
4957       */
4958      public Builder setMessage(
4959          int index, java.lang.String value) {
4960        if (value == null) {
4961    throw new NullPointerException();
4962  }
4963  ensureMessageIsMutable();
4964        message_.set(index, value);
4965        onChanged();
4966        return this;
4967      }
4968      /**
4969       * <code>repeated string message = 1;</code>
4970       */
4971      public Builder addMessage(
4972          java.lang.String value) {
4973        if (value == null) {
4974    throw new NullPointerException();
4975  }
4976  ensureMessageIsMutable();
4977        message_.add(value);
4978        onChanged();
4979        return this;
4980      }
4981      /**
4982       * <code>repeated string message = 1;</code>
4983       */
4984      public Builder addAllMessage(
4985          java.lang.Iterable<java.lang.String> values) {
4986        ensureMessageIsMutable();
4987        super.addAll(values, message_);
4988        onChanged();
4989        return this;
4990      }
4991      /**
4992       * <code>repeated string message = 1;</code>
4993       */
4994      public Builder clearMessage() {
4995        message_ = com.google.protobuf.LazyStringArrayList.EMPTY;
4996        bitField0_ = (bitField0_ & ~0x00000001);
4997        onChanged();
4998        return this;
4999      }
5000      /**
5001       * <code>repeated string message = 1;</code>
5002       */
5003      public Builder addMessageBytes(
5004          com.google.protobuf.ByteString value) {
5005        if (value == null) {
5006    throw new NullPointerException();
5007  }
5008  ensureMessageIsMutable();
5009        message_.add(value);
5010        onChanged();
5011        return this;
5012      }
5013
5014      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto2)
5015    }
5016
5017    static {
5018      defaultInstance = new EchoResponseProto2(true);
5019      defaultInstance.initFields();
5020    }
5021
5022    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto2)
5023  }
5024
5025  public interface AddRequestProtoOrBuilder
5026      extends com.google.protobuf.MessageOrBuilder {
5027
5028    // required int32 param1 = 1;
5029    /**
5030     * <code>required int32 param1 = 1;</code>
5031     */
5032    boolean hasParam1();
5033    /**
5034     * <code>required int32 param1 = 1;</code>
5035     */
5036    int getParam1();
5037
5038    // required int32 param2 = 2;
5039    /**
5040     * <code>required int32 param2 = 2;</code>
5041     */
5042    boolean hasParam2();
5043    /**
5044     * <code>required int32 param2 = 2;</code>
5045     */
5046    int getParam2();
5047  }
5048  /**
5049   * Protobuf type {@code hadoop.common.AddRequestProto}
5050   */
5051  public static final class AddRequestProto extends
5052      com.google.protobuf.GeneratedMessage
5053      implements AddRequestProtoOrBuilder {
5054    // Use AddRequestProto.newBuilder() to construct.
5055    private AddRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5056      super(builder);
5057      this.unknownFields = builder.getUnknownFields();
5058    }
5059    private AddRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5060
5061    private static final AddRequestProto defaultInstance;
5062    public static AddRequestProto getDefaultInstance() {
5063      return defaultInstance;
5064    }
5065
5066    public AddRequestProto getDefaultInstanceForType() {
5067      return defaultInstance;
5068    }
5069
5070    private final com.google.protobuf.UnknownFieldSet unknownFields;
5071    @java.lang.Override
5072    public final com.google.protobuf.UnknownFieldSet
5073        getUnknownFields() {
5074      return this.unknownFields;
5075    }
5076    private AddRequestProto(
5077        com.google.protobuf.CodedInputStream input,
5078        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5079        throws com.google.protobuf.InvalidProtocolBufferException {
5080      initFields();
5081      int mutable_bitField0_ = 0;
5082      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
5083          com.google.protobuf.UnknownFieldSet.newBuilder();
5084      try {
5085        boolean done = false;
5086        while (!done) {
5087          int tag = input.readTag();
5088          switch (tag) {
5089            case 0:
5090              done = true;
5091              break;
5092            default: {
5093              if (!parseUnknownField(input, unknownFields,
5094                                     extensionRegistry, tag)) {
5095                done = true;
5096              }
5097              break;
5098            }
5099            case 8: {
5100              bitField0_ |= 0x00000001;
5101              param1_ = input.readInt32();
5102              break;
5103            }
5104            case 16: {
5105              bitField0_ |= 0x00000002;
5106              param2_ = input.readInt32();
5107              break;
5108            }
5109          }
5110        }
5111      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5112        throw e.setUnfinishedMessage(this);
5113      } catch (java.io.IOException e) {
5114        throw new com.google.protobuf.InvalidProtocolBufferException(
5115            e.getMessage()).setUnfinishedMessage(this);
5116      } finally {
5117        this.unknownFields = unknownFields.build();
5118        makeExtensionsImmutable();
5119      }
5120    }
5121    public static final com.google.protobuf.Descriptors.Descriptor
5122        getDescriptor() {
5123      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_descriptor;
5124    }
5125
5126    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5127        internalGetFieldAccessorTable() {
5128      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
5129          .ensureFieldAccessorsInitialized(
5130              org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.Builder.class);
5131    }
5132
5133    public static com.google.protobuf.Parser<AddRequestProto> PARSER =
5134        new com.google.protobuf.AbstractParser<AddRequestProto>() {
5135      public AddRequestProto parsePartialFrom(
5136          com.google.protobuf.CodedInputStream input,
5137          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5138          throws com.google.protobuf.InvalidProtocolBufferException {
5139        return new AddRequestProto(input, extensionRegistry);
5140      }
5141    };
5142
5143    @java.lang.Override
5144    public com.google.protobuf.Parser<AddRequestProto> getParserForType() {
5145      return PARSER;
5146    }
5147
5148    private int bitField0_;
5149    // required int32 param1 = 1;
5150    public static final int PARAM1_FIELD_NUMBER = 1;
5151    private int param1_;
5152    /**
5153     * <code>required int32 param1 = 1;</code>
5154     */
5155    public boolean hasParam1() {
5156      return ((bitField0_ & 0x00000001) == 0x00000001);
5157    }
5158    /**
5159     * <code>required int32 param1 = 1;</code>
5160     */
5161    public int getParam1() {
5162      return param1_;
5163    }
5164
5165    // required int32 param2 = 2;
5166    public static final int PARAM2_FIELD_NUMBER = 2;
5167    private int param2_;
5168    /**
5169     * <code>required int32 param2 = 2;</code>
5170     */
5171    public boolean hasParam2() {
5172      return ((bitField0_ & 0x00000002) == 0x00000002);
5173    }
5174    /**
5175     * <code>required int32 param2 = 2;</code>
5176     */
5177    public int getParam2() {
5178      return param2_;
5179    }
5180
5181    private void initFields() {
5182      param1_ = 0;
5183      param2_ = 0;
5184    }
5185    private byte memoizedIsInitialized = -1;
5186    public final boolean isInitialized() {
5187      byte isInitialized = memoizedIsInitialized;
5188      if (isInitialized != -1) return isInitialized == 1;
5189
5190      if (!hasParam1()) {
5191        memoizedIsInitialized = 0;
5192        return false;
5193      }
5194      if (!hasParam2()) {
5195        memoizedIsInitialized = 0;
5196        return false;
5197      }
5198      memoizedIsInitialized = 1;
5199      return true;
5200    }
5201
5202    public void writeTo(com.google.protobuf.CodedOutputStream output)
5203                        throws java.io.IOException {
5204      getSerializedSize();
5205      if (((bitField0_ & 0x00000001) == 0x00000001)) {
5206        output.writeInt32(1, param1_);
5207      }
5208      if (((bitField0_ & 0x00000002) == 0x00000002)) {
5209        output.writeInt32(2, param2_);
5210      }
5211      getUnknownFields().writeTo(output);
5212    }
5213
5214    private int memoizedSerializedSize = -1;
5215    public int getSerializedSize() {
5216      int size = memoizedSerializedSize;
5217      if (size != -1) return size;
5218
5219      size = 0;
5220      if (((bitField0_ & 0x00000001) == 0x00000001)) {
5221        size += com.google.protobuf.CodedOutputStream
5222          .computeInt32Size(1, param1_);
5223      }
5224      if (((bitField0_ & 0x00000002) == 0x00000002)) {
5225        size += com.google.protobuf.CodedOutputStream
5226          .computeInt32Size(2, param2_);
5227      }
5228      size += getUnknownFields().getSerializedSize();
5229      memoizedSerializedSize = size;
5230      return size;
5231    }
5232
5233    private static final long serialVersionUID = 0L;
5234    @java.lang.Override
5235    protected java.lang.Object writeReplace()
5236        throws java.io.ObjectStreamException {
5237      return super.writeReplace();
5238    }
5239
5240    @java.lang.Override
5241    public boolean equals(final java.lang.Object obj) {
5242      if (obj == this) {
5243       return true;
5244      }
5245      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)) {
5246        return super.equals(obj);
5247      }
5248      org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto) obj;
5249
5250      boolean result = true;
5251      result = result && (hasParam1() == other.hasParam1());
5252      if (hasParam1()) {
5253        result = result && (getParam1()
5254            == other.getParam1());
5255      }
5256      result = result && (hasParam2() == other.hasParam2());
5257      if (hasParam2()) {
5258        result = result && (getParam2()
5259            == other.getParam2());
5260      }
5261      result = result &&
5262          getUnknownFields().equals(other.getUnknownFields());
5263      return result;
5264    }
5265
5266    private int memoizedHashCode = 0;
5267    @java.lang.Override
5268    public int hashCode() {
5269      if (memoizedHashCode != 0) {
5270        return memoizedHashCode;
5271      }
5272      int hash = 41;
5273      hash = (19 * hash) + getDescriptorForType().hashCode();
5274      if (hasParam1()) {
5275        hash = (37 * hash) + PARAM1_FIELD_NUMBER;
5276        hash = (53 * hash) + getParam1();
5277      }
5278      if (hasParam2()) {
5279        hash = (37 * hash) + PARAM2_FIELD_NUMBER;
5280        hash = (53 * hash) + getParam2();
5281      }
5282      hash = (29 * hash) + getUnknownFields().hashCode();
5283      memoizedHashCode = hash;
5284      return hash;
5285    }
5286
5287    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
5288        com.google.protobuf.ByteString data)
5289        throws com.google.protobuf.InvalidProtocolBufferException {
5290      return PARSER.parseFrom(data);
5291    }
5292    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
5293        com.google.protobuf.ByteString data,
5294        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5295        throws com.google.protobuf.InvalidProtocolBufferException {
5296      return PARSER.parseFrom(data, extensionRegistry);
5297    }
5298    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(byte[] data)
5299        throws com.google.protobuf.InvalidProtocolBufferException {
5300      return PARSER.parseFrom(data);
5301    }
5302    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
5303        byte[] data,
5304        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5305        throws com.google.protobuf.InvalidProtocolBufferException {
5306      return PARSER.parseFrom(data, extensionRegistry);
5307    }
5308    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(java.io.InputStream input)
5309        throws java.io.IOException {
5310      return PARSER.parseFrom(input);
5311    }
5312    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
5313        java.io.InputStream input,
5314        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5315        throws java.io.IOException {
5316      return PARSER.parseFrom(input, extensionRegistry);
5317    }
5318    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseDelimitedFrom(java.io.InputStream input)
5319        throws java.io.IOException {
5320      return PARSER.parseDelimitedFrom(input);
5321    }
5322    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseDelimitedFrom(
5323        java.io.InputStream input,
5324        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5325        throws java.io.IOException {
5326      return PARSER.parseDelimitedFrom(input, extensionRegistry);
5327    }
5328    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
5329        com.google.protobuf.CodedInputStream input)
5330        throws java.io.IOException {
5331      return PARSER.parseFrom(input);
5332    }
5333    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parseFrom(
5334        com.google.protobuf.CodedInputStream input,
5335        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5336        throws java.io.IOException {
5337      return PARSER.parseFrom(input, extensionRegistry);
5338    }
5339
5340    public static Builder newBuilder() { return Builder.create(); }
5341    public Builder newBuilderForType() { return newBuilder(); }
5342    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto prototype) {
5343      return newBuilder().mergeFrom(prototype);
5344    }
5345    public Builder toBuilder() { return newBuilder(this); }
5346
5347    @java.lang.Override
5348    protected Builder newBuilderForType(
5349        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5350      Builder builder = new Builder(parent);
5351      return builder;
5352    }
5353    /**
5354     * Protobuf type {@code hadoop.common.AddRequestProto}
5355     */
5356    public static final class Builder extends
5357        com.google.protobuf.GeneratedMessage.Builder<Builder>
5358       implements org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProtoOrBuilder {
5359      public static final com.google.protobuf.Descriptors.Descriptor
5360          getDescriptor() {
5361        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_descriptor;
5362      }
5363
5364      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5365          internalGetFieldAccessorTable() {
5366        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_fieldAccessorTable
5367            .ensureFieldAccessorsInitialized(
5368                org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.Builder.class);
5369      }
5370
5371      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.newBuilder()
5372      private Builder() {
5373        maybeForceBuilderInitialization();
5374      }
5375
5376      private Builder(
5377          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5378        super(parent);
5379        maybeForceBuilderInitialization();
5380      }
5381      private void maybeForceBuilderInitialization() {
5382        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
5383        }
5384      }
5385      private static Builder create() {
5386        return new Builder();
5387      }
5388
5389      public Builder clear() {
5390        super.clear();
5391        param1_ = 0;
5392        bitField0_ = (bitField0_ & ~0x00000001);
5393        param2_ = 0;
5394        bitField0_ = (bitField0_ & ~0x00000002);
5395        return this;
5396      }
5397
5398      public Builder clone() {
5399        return create().mergeFrom(buildPartial());
5400      }
5401
5402      public com.google.protobuf.Descriptors.Descriptor
5403          getDescriptorForType() {
5404        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto_descriptor;
5405      }
5406
5407      public org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto getDefaultInstanceForType() {
5408        return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance();
5409      }
5410
5411      public org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto build() {
5412        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto result = buildPartial();
5413        if (!result.isInitialized()) {
5414          throw newUninitializedMessageException(result);
5415        }
5416        return result;
5417      }
5418
5419      public org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto buildPartial() {
5420        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto(this);
5421        int from_bitField0_ = bitField0_;
5422        int to_bitField0_ = 0;
5423        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
5424          to_bitField0_ |= 0x00000001;
5425        }
5426        result.param1_ = param1_;
5427        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
5428          to_bitField0_ |= 0x00000002;
5429        }
5430        result.param2_ = param2_;
5431        result.bitField0_ = to_bitField0_;
5432        onBuilt();
5433        return result;
5434      }
5435
5436      public Builder mergeFrom(com.google.protobuf.Message other) {
5437        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto) {
5438          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)other);
5439        } else {
5440          super.mergeFrom(other);
5441          return this;
5442        }
5443      }
5444
5445      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto other) {
5446        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance()) return this;
5447        if (other.hasParam1()) {
5448          setParam1(other.getParam1());
5449        }
5450        if (other.hasParam2()) {
5451          setParam2(other.getParam2());
5452        }
5453        this.mergeUnknownFields(other.getUnknownFields());
5454        return this;
5455      }
5456
5457      public final boolean isInitialized() {
5458        if (!hasParam1()) {
5459          
5460          return false;
5461        }
5462        if (!hasParam2()) {
5463          
5464          return false;
5465        }
5466        return true;
5467      }
5468
5469      public Builder mergeFrom(
5470          com.google.protobuf.CodedInputStream input,
5471          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5472          throws java.io.IOException {
5473        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto parsedMessage = null;
5474        try {
5475          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5476        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5477          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto) e.getUnfinishedMessage();
5478          throw e;
5479        } finally {
5480          if (parsedMessage != null) {
5481            mergeFrom(parsedMessage);
5482          }
5483        }
5484        return this;
5485      }
5486      private int bitField0_;
5487
5488      // required int32 param1 = 1;
5489      private int param1_ ;
5490      /**
5491       * <code>required int32 param1 = 1;</code>
5492       */
5493      public boolean hasParam1() {
5494        return ((bitField0_ & 0x00000001) == 0x00000001);
5495      }
5496      /**
5497       * <code>required int32 param1 = 1;</code>
5498       */
5499      public int getParam1() {
5500        return param1_;
5501      }
5502      /**
5503       * <code>required int32 param1 = 1;</code>
5504       */
5505      public Builder setParam1(int value) {
5506        bitField0_ |= 0x00000001;
5507        param1_ = value;
5508        onChanged();
5509        return this;
5510      }
5511      /**
5512       * <code>required int32 param1 = 1;</code>
5513       */
5514      public Builder clearParam1() {
5515        bitField0_ = (bitField0_ & ~0x00000001);
5516        param1_ = 0;
5517        onChanged();
5518        return this;
5519      }
5520
5521      // required int32 param2 = 2;
5522      private int param2_ ;
5523      /**
5524       * <code>required int32 param2 = 2;</code>
5525       */
5526      public boolean hasParam2() {
5527        return ((bitField0_ & 0x00000002) == 0x00000002);
5528      }
5529      /**
5530       * <code>required int32 param2 = 2;</code>
5531       */
5532      public int getParam2() {
5533        return param2_;
5534      }
5535      /**
5536       * <code>required int32 param2 = 2;</code>
5537       */
5538      public Builder setParam2(int value) {
5539        bitField0_ |= 0x00000002;
5540        param2_ = value;
5541        onChanged();
5542        return this;
5543      }
5544      /**
5545       * <code>required int32 param2 = 2;</code>
5546       */
5547      public Builder clearParam2() {
5548        bitField0_ = (bitField0_ & ~0x00000002);
5549        param2_ = 0;
5550        onChanged();
5551        return this;
5552      }
5553
5554      // @@protoc_insertion_point(builder_scope:hadoop.common.AddRequestProto)
5555    }
5556
5557    static {
5558      defaultInstance = new AddRequestProto(true);
5559      defaultInstance.initFields();
5560    }
5561
5562    // @@protoc_insertion_point(class_scope:hadoop.common.AddRequestProto)
5563  }
5564
5565  public interface AddRequestProto2OrBuilder
5566      extends com.google.protobuf.MessageOrBuilder {
5567
5568    // repeated int32 params = 1;
5569    /**
5570     * <code>repeated int32 params = 1;</code>
5571     */
5572    java.util.List<java.lang.Integer> getParamsList();
5573    /**
5574     * <code>repeated int32 params = 1;</code>
5575     */
5576    int getParamsCount();
5577    /**
5578     * <code>repeated int32 params = 1;</code>
5579     */
5580    int getParams(int index);
5581  }
5582  /**
5583   * Protobuf type {@code hadoop.common.AddRequestProto2}
5584   */
5585  public static final class AddRequestProto2 extends
5586      com.google.protobuf.GeneratedMessage
5587      implements AddRequestProto2OrBuilder {
5588    // Use AddRequestProto2.newBuilder() to construct.
5589    private AddRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5590      super(builder);
5591      this.unknownFields = builder.getUnknownFields();
5592    }
5593    private AddRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5594
5595    private static final AddRequestProto2 defaultInstance;
5596    public static AddRequestProto2 getDefaultInstance() {
5597      return defaultInstance;
5598    }
5599
5600    public AddRequestProto2 getDefaultInstanceForType() {
5601      return defaultInstance;
5602    }
5603
5604    private final com.google.protobuf.UnknownFieldSet unknownFields;
5605    @java.lang.Override
5606    public final com.google.protobuf.UnknownFieldSet
5607        getUnknownFields() {
5608      return this.unknownFields;
5609    }
5610    private AddRequestProto2(
5611        com.google.protobuf.CodedInputStream input,
5612        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5613        throws com.google.protobuf.InvalidProtocolBufferException {
5614      initFields();
5615      int mutable_bitField0_ = 0;
5616      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
5617          com.google.protobuf.UnknownFieldSet.newBuilder();
5618      try {
5619        boolean done = false;
5620        while (!done) {
5621          int tag = input.readTag();
5622          switch (tag) {
5623            case 0:
5624              done = true;
5625              break;
5626            default: {
5627              if (!parseUnknownField(input, unknownFields,
5628                                     extensionRegistry, tag)) {
5629                done = true;
5630              }
5631              break;
5632            }
5633            case 8: {
5634              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
5635                params_ = new java.util.ArrayList<java.lang.Integer>();
5636                mutable_bitField0_ |= 0x00000001;
5637              }
5638              params_.add(input.readInt32());
5639              break;
5640            }
5641            case 10: {
5642              int length = input.readRawVarint32();
5643              int limit = input.pushLimit(length);
5644              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
5645                params_ = new java.util.ArrayList<java.lang.Integer>();
5646                mutable_bitField0_ |= 0x00000001;
5647              }
5648              while (input.getBytesUntilLimit() > 0) {
5649                params_.add(input.readInt32());
5650              }
5651              input.popLimit(limit);
5652              break;
5653            }
5654          }
5655        }
5656      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5657        throw e.setUnfinishedMessage(this);
5658      } catch (java.io.IOException e) {
5659        throw new com.google.protobuf.InvalidProtocolBufferException(
5660            e.getMessage()).setUnfinishedMessage(this);
5661      } finally {
5662        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
5663          params_ = java.util.Collections.unmodifiableList(params_);
5664        }
5665        this.unknownFields = unknownFields.build();
5666        makeExtensionsImmutable();
5667      }
5668    }
5669    public static final com.google.protobuf.Descriptors.Descriptor
5670        getDescriptor() {
5671      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_descriptor;
5672    }
5673
5674    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5675        internalGetFieldAccessorTable() {
5676      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
5677          .ensureFieldAccessorsInitialized(
5678              org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.Builder.class);
5679    }
5680
5681    public static com.google.protobuf.Parser<AddRequestProto2> PARSER =
5682        new com.google.protobuf.AbstractParser<AddRequestProto2>() {
5683      public AddRequestProto2 parsePartialFrom(
5684          com.google.protobuf.CodedInputStream input,
5685          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5686          throws com.google.protobuf.InvalidProtocolBufferException {
5687        return new AddRequestProto2(input, extensionRegistry);
5688      }
5689    };
5690
5691    @java.lang.Override
5692    public com.google.protobuf.Parser<AddRequestProto2> getParserForType() {
5693      return PARSER;
5694    }
5695
5696    // repeated int32 params = 1;
5697    public static final int PARAMS_FIELD_NUMBER = 1;
5698    private java.util.List<java.lang.Integer> params_;
5699    /**
5700     * <code>repeated int32 params = 1;</code>
5701     */
5702    public java.util.List<java.lang.Integer>
5703        getParamsList() {
5704      return params_;
5705    }
5706    /**
5707     * <code>repeated int32 params = 1;</code>
5708     */
5709    public int getParamsCount() {
5710      return params_.size();
5711    }
5712    /**
5713     * <code>repeated int32 params = 1;</code>
5714     */
5715    public int getParams(int index) {
5716      return params_.get(index);
5717    }
5718
5719    private void initFields() {
5720      params_ = java.util.Collections.emptyList();
5721    }
5722    private byte memoizedIsInitialized = -1;
5723    public final boolean isInitialized() {
5724      byte isInitialized = memoizedIsInitialized;
5725      if (isInitialized != -1) return isInitialized == 1;
5726
5727      memoizedIsInitialized = 1;
5728      return true;
5729    }
5730
5731    public void writeTo(com.google.protobuf.CodedOutputStream output)
5732                        throws java.io.IOException {
5733      getSerializedSize();
5734      for (int i = 0; i < params_.size(); i++) {
5735        output.writeInt32(1, params_.get(i));
5736      }
5737      getUnknownFields().writeTo(output);
5738    }
5739
5740    private int memoizedSerializedSize = -1;
5741    public int getSerializedSize() {
5742      int size = memoizedSerializedSize;
5743      if (size != -1) return size;
5744
5745      size = 0;
5746      {
5747        int dataSize = 0;
5748        for (int i = 0; i < params_.size(); i++) {
5749          dataSize += com.google.protobuf.CodedOutputStream
5750            .computeInt32SizeNoTag(params_.get(i));
5751        }
5752        size += dataSize;
5753        size += 1 * getParamsList().size();
5754      }
5755      size += getUnknownFields().getSerializedSize();
5756      memoizedSerializedSize = size;
5757      return size;
5758    }
5759
5760    private static final long serialVersionUID = 0L;
5761    @java.lang.Override
5762    protected java.lang.Object writeReplace()
5763        throws java.io.ObjectStreamException {
5764      return super.writeReplace();
5765    }
5766
5767    @java.lang.Override
5768    public boolean equals(final java.lang.Object obj) {
5769      if (obj == this) {
5770       return true;
5771      }
5772      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)) {
5773        return super.equals(obj);
5774      }
5775      org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2) obj;
5776
5777      boolean result = true;
5778      result = result && getParamsList()
5779          .equals(other.getParamsList());
5780      result = result &&
5781          getUnknownFields().equals(other.getUnknownFields());
5782      return result;
5783    }
5784
5785    private int memoizedHashCode = 0;
5786    @java.lang.Override
5787    public int hashCode() {
5788      if (memoizedHashCode != 0) {
5789        return memoizedHashCode;
5790      }
5791      int hash = 41;
5792      hash = (19 * hash) + getDescriptorForType().hashCode();
5793      if (getParamsCount() > 0) {
5794        hash = (37 * hash) + PARAMS_FIELD_NUMBER;
5795        hash = (53 * hash) + getParamsList().hashCode();
5796      }
5797      hash = (29 * hash) + getUnknownFields().hashCode();
5798      memoizedHashCode = hash;
5799      return hash;
5800    }
5801
5802    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
5803        com.google.protobuf.ByteString data)
5804        throws com.google.protobuf.InvalidProtocolBufferException {
5805      return PARSER.parseFrom(data);
5806    }
5807    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
5808        com.google.protobuf.ByteString data,
5809        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5810        throws com.google.protobuf.InvalidProtocolBufferException {
5811      return PARSER.parseFrom(data, extensionRegistry);
5812    }
5813    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(byte[] data)
5814        throws com.google.protobuf.InvalidProtocolBufferException {
5815      return PARSER.parseFrom(data);
5816    }
5817    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
5818        byte[] data,
5819        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5820        throws com.google.protobuf.InvalidProtocolBufferException {
5821      return PARSER.parseFrom(data, extensionRegistry);
5822    }
5823    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(java.io.InputStream input)
5824        throws java.io.IOException {
5825      return PARSER.parseFrom(input);
5826    }
5827    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
5828        java.io.InputStream input,
5829        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5830        throws java.io.IOException {
5831      return PARSER.parseFrom(input, extensionRegistry);
5832    }
5833    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseDelimitedFrom(java.io.InputStream input)
5834        throws java.io.IOException {
5835      return PARSER.parseDelimitedFrom(input);
5836    }
5837    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseDelimitedFrom(
5838        java.io.InputStream input,
5839        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5840        throws java.io.IOException {
5841      return PARSER.parseDelimitedFrom(input, extensionRegistry);
5842    }
5843    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
5844        com.google.protobuf.CodedInputStream input)
5845        throws java.io.IOException {
5846      return PARSER.parseFrom(input);
5847    }
5848    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parseFrom(
5849        com.google.protobuf.CodedInputStream input,
5850        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5851        throws java.io.IOException {
5852      return PARSER.parseFrom(input, extensionRegistry);
5853    }
5854
5855    public static Builder newBuilder() { return Builder.create(); }
5856    public Builder newBuilderForType() { return newBuilder(); }
5857    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 prototype) {
5858      return newBuilder().mergeFrom(prototype);
5859    }
5860    public Builder toBuilder() { return newBuilder(this); }
5861
5862    @java.lang.Override
5863    protected Builder newBuilderForType(
5864        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5865      Builder builder = new Builder(parent);
5866      return builder;
5867    }
5868    /**
5869     * Protobuf type {@code hadoop.common.AddRequestProto2}
5870     */
5871    public static final class Builder extends
5872        com.google.protobuf.GeneratedMessage.Builder<Builder>
5873       implements org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2OrBuilder {
5874      public static final com.google.protobuf.Descriptors.Descriptor
5875          getDescriptor() {
5876        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_descriptor;
5877      }
5878
5879      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5880          internalGetFieldAccessorTable() {
5881        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable
5882            .ensureFieldAccessorsInitialized(
5883                org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.Builder.class);
5884      }
5885
5886      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.newBuilder()
5887      private Builder() {
5888        maybeForceBuilderInitialization();
5889      }
5890
5891      private Builder(
5892          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5893        super(parent);
5894        maybeForceBuilderInitialization();
5895      }
5896      private void maybeForceBuilderInitialization() {
5897        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
5898        }
5899      }
5900      private static Builder create() {
5901        return new Builder();
5902      }
5903
5904      public Builder clear() {
5905        super.clear();
5906        params_ = java.util.Collections.emptyList();
5907        bitField0_ = (bitField0_ & ~0x00000001);
5908        return this;
5909      }
5910
5911      public Builder clone() {
5912        return create().mergeFrom(buildPartial());
5913      }
5914
5915      public com.google.protobuf.Descriptors.Descriptor
5916          getDescriptorForType() {
5917        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddRequestProto2_descriptor;
5918      }
5919
5920      public org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 getDefaultInstanceForType() {
5921        return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance();
5922      }
5923
5924      public org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 build() {
5925        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 result = buildPartial();
5926        if (!result.isInitialized()) {
5927          throw newUninitializedMessageException(result);
5928        }
5929        return result;
5930      }
5931
5932      public org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 buildPartial() {
5933        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2(this);
5934        int from_bitField0_ = bitField0_;
5935        if (((bitField0_ & 0x00000001) == 0x00000001)) {
5936          params_ = java.util.Collections.unmodifiableList(params_);
5937          bitField0_ = (bitField0_ & ~0x00000001);
5938        }
5939        result.params_ = params_;
5940        onBuilt();
5941        return result;
5942      }
5943
5944      public Builder mergeFrom(com.google.protobuf.Message other) {
5945        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2) {
5946          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)other);
5947        } else {
5948          super.mergeFrom(other);
5949          return this;
5950        }
5951      }
5952
5953      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 other) {
5954        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance()) return this;
5955        if (!other.params_.isEmpty()) {
5956          if (params_.isEmpty()) {
5957            params_ = other.params_;
5958            bitField0_ = (bitField0_ & ~0x00000001);
5959          } else {
5960            ensureParamsIsMutable();
5961            params_.addAll(other.params_);
5962          }
5963          onChanged();
5964        }
5965        this.mergeUnknownFields(other.getUnknownFields());
5966        return this;
5967      }
5968
5969      public final boolean isInitialized() {
5970        return true;
5971      }
5972
5973      public Builder mergeFrom(
5974          com.google.protobuf.CodedInputStream input,
5975          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5976          throws java.io.IOException {
5977        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 parsedMessage = null;
5978        try {
5979          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5980        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5981          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2) e.getUnfinishedMessage();
5982          throw e;
5983        } finally {
5984          if (parsedMessage != null) {
5985            mergeFrom(parsedMessage);
5986          }
5987        }
5988        return this;
5989      }
5990      private int bitField0_;
5991
5992      // repeated int32 params = 1;
5993      private java.util.List<java.lang.Integer> params_ = java.util.Collections.emptyList();
5994      private void ensureParamsIsMutable() {
5995        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
5996          params_ = new java.util.ArrayList<java.lang.Integer>(params_);
5997          bitField0_ |= 0x00000001;
5998         }
5999      }
6000      /**
6001       * <code>repeated int32 params = 1;</code>
6002       */
6003      public java.util.List<java.lang.Integer>
6004          getParamsList() {
6005        return java.util.Collections.unmodifiableList(params_);
6006      }
6007      /**
6008       * <code>repeated int32 params = 1;</code>
6009       */
6010      public int getParamsCount() {
6011        return params_.size();
6012      }
6013      /**
6014       * <code>repeated int32 params = 1;</code>
6015       */
6016      public int getParams(int index) {
6017        return params_.get(index);
6018      }
6019      /**
6020       * <code>repeated int32 params = 1;</code>
6021       */
6022      public Builder setParams(
6023          int index, int value) {
6024        ensureParamsIsMutable();
6025        params_.set(index, value);
6026        onChanged();
6027        return this;
6028      }
6029      /**
6030       * <code>repeated int32 params = 1;</code>
6031       */
6032      public Builder addParams(int value) {
6033        ensureParamsIsMutable();
6034        params_.add(value);
6035        onChanged();
6036        return this;
6037      }
6038      /**
6039       * <code>repeated int32 params = 1;</code>
6040       */
6041      public Builder addAllParams(
6042          java.lang.Iterable<? extends java.lang.Integer> values) {
6043        ensureParamsIsMutable();
6044        super.addAll(values, params_);
6045        onChanged();
6046        return this;
6047      }
6048      /**
6049       * <code>repeated int32 params = 1;</code>
6050       */
6051      public Builder clearParams() {
6052        params_ = java.util.Collections.emptyList();
6053        bitField0_ = (bitField0_ & ~0x00000001);
6054        onChanged();
6055        return this;
6056      }
6057
6058      // @@protoc_insertion_point(builder_scope:hadoop.common.AddRequestProto2)
6059    }
6060
6061    static {
6062      defaultInstance = new AddRequestProto2(true);
6063      defaultInstance.initFields();
6064    }
6065
6066    // @@protoc_insertion_point(class_scope:hadoop.common.AddRequestProto2)
6067  }
6068
6069  public interface AddResponseProtoOrBuilder
6070      extends com.google.protobuf.MessageOrBuilder {
6071
6072    // required int32 result = 1;
6073    /**
6074     * <code>required int32 result = 1;</code>
6075     */
6076    boolean hasResult();
6077    /**
6078     * <code>required int32 result = 1;</code>
6079     */
6080    int getResult();
6081  }
6082  /**
6083   * Protobuf type {@code hadoop.common.AddResponseProto}
6084   */
6085  public static final class AddResponseProto extends
6086      com.google.protobuf.GeneratedMessage
6087      implements AddResponseProtoOrBuilder {
6088    // Use AddResponseProto.newBuilder() to construct.
6089    private AddResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6090      super(builder);
6091      this.unknownFields = builder.getUnknownFields();
6092    }
6093    private AddResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6094
6095    private static final AddResponseProto defaultInstance;
6096    public static AddResponseProto getDefaultInstance() {
6097      return defaultInstance;
6098    }
6099
6100    public AddResponseProto getDefaultInstanceForType() {
6101      return defaultInstance;
6102    }
6103
6104    private final com.google.protobuf.UnknownFieldSet unknownFields;
6105    @java.lang.Override
6106    public final com.google.protobuf.UnknownFieldSet
6107        getUnknownFields() {
6108      return this.unknownFields;
6109    }
6110    private AddResponseProto(
6111        com.google.protobuf.CodedInputStream input,
6112        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6113        throws com.google.protobuf.InvalidProtocolBufferException {
6114      initFields();
6115      int mutable_bitField0_ = 0;
6116      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6117          com.google.protobuf.UnknownFieldSet.newBuilder();
6118      try {
6119        boolean done = false;
6120        while (!done) {
6121          int tag = input.readTag();
6122          switch (tag) {
6123            case 0:
6124              done = true;
6125              break;
6126            default: {
6127              if (!parseUnknownField(input, unknownFields,
6128                                     extensionRegistry, tag)) {
6129                done = true;
6130              }
6131              break;
6132            }
6133            case 8: {
6134              bitField0_ |= 0x00000001;
6135              result_ = input.readInt32();
6136              break;
6137            }
6138          }
6139        }
6140      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6141        throw e.setUnfinishedMessage(this);
6142      } catch (java.io.IOException e) {
6143        throw new com.google.protobuf.InvalidProtocolBufferException(
6144            e.getMessage()).setUnfinishedMessage(this);
6145      } finally {
6146        this.unknownFields = unknownFields.build();
6147        makeExtensionsImmutable();
6148      }
6149    }
6150    public static final com.google.protobuf.Descriptors.Descriptor
6151        getDescriptor() {
6152      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_descriptor;
6153    }
6154
6155    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6156        internalGetFieldAccessorTable() {
6157      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
6158          .ensureFieldAccessorsInitialized(
6159              org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.Builder.class);
6160    }
6161
6162    public static com.google.protobuf.Parser<AddResponseProto> PARSER =
6163        new com.google.protobuf.AbstractParser<AddResponseProto>() {
6164      public AddResponseProto parsePartialFrom(
6165          com.google.protobuf.CodedInputStream input,
6166          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6167          throws com.google.protobuf.InvalidProtocolBufferException {
6168        return new AddResponseProto(input, extensionRegistry);
6169      }
6170    };
6171
6172    @java.lang.Override
6173    public com.google.protobuf.Parser<AddResponseProto> getParserForType() {
6174      return PARSER;
6175    }
6176
6177    private int bitField0_;
6178    // required int32 result = 1;
6179    public static final int RESULT_FIELD_NUMBER = 1;
6180    private int result_;
6181    /**
6182     * <code>required int32 result = 1;</code>
6183     */
6184    public boolean hasResult() {
6185      return ((bitField0_ & 0x00000001) == 0x00000001);
6186    }
6187    /**
6188     * <code>required int32 result = 1;</code>
6189     */
6190    public int getResult() {
6191      return result_;
6192    }
6193
6194    private void initFields() {
6195      result_ = 0;
6196    }
6197    private byte memoizedIsInitialized = -1;
6198    public final boolean isInitialized() {
6199      byte isInitialized = memoizedIsInitialized;
6200      if (isInitialized != -1) return isInitialized == 1;
6201
6202      if (!hasResult()) {
6203        memoizedIsInitialized = 0;
6204        return false;
6205      }
6206      memoizedIsInitialized = 1;
6207      return true;
6208    }
6209
6210    public void writeTo(com.google.protobuf.CodedOutputStream output)
6211                        throws java.io.IOException {
6212      getSerializedSize();
6213      if (((bitField0_ & 0x00000001) == 0x00000001)) {
6214        output.writeInt32(1, result_);
6215      }
6216      getUnknownFields().writeTo(output);
6217    }
6218
6219    private int memoizedSerializedSize = -1;
6220    public int getSerializedSize() {
6221      int size = memoizedSerializedSize;
6222      if (size != -1) return size;
6223
6224      size = 0;
6225      if (((bitField0_ & 0x00000001) == 0x00000001)) {
6226        size += com.google.protobuf.CodedOutputStream
6227          .computeInt32Size(1, result_);
6228      }
6229      size += getUnknownFields().getSerializedSize();
6230      memoizedSerializedSize = size;
6231      return size;
6232    }
6233
6234    private static final long serialVersionUID = 0L;
6235    @java.lang.Override
6236    protected java.lang.Object writeReplace()
6237        throws java.io.ObjectStreamException {
6238      return super.writeReplace();
6239    }
6240
6241    @java.lang.Override
6242    public boolean equals(final java.lang.Object obj) {
6243      if (obj == this) {
6244       return true;
6245      }
6246      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto)) {
6247        return super.equals(obj);
6248      }
6249      org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) obj;
6250
6251      boolean result = true;
6252      result = result && (hasResult() == other.hasResult());
6253      if (hasResult()) {
6254        result = result && (getResult()
6255            == other.getResult());
6256      }
6257      result = result &&
6258          getUnknownFields().equals(other.getUnknownFields());
6259      return result;
6260    }
6261
6262    private int memoizedHashCode = 0;
6263    @java.lang.Override
6264    public int hashCode() {
6265      if (memoizedHashCode != 0) {
6266        return memoizedHashCode;
6267      }
6268      int hash = 41;
6269      hash = (19 * hash) + getDescriptorForType().hashCode();
6270      if (hasResult()) {
6271        hash = (37 * hash) + RESULT_FIELD_NUMBER;
6272        hash = (53 * hash) + getResult();
6273      }
6274      hash = (29 * hash) + getUnknownFields().hashCode();
6275      memoizedHashCode = hash;
6276      return hash;
6277    }
6278
6279    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
6280        com.google.protobuf.ByteString data)
6281        throws com.google.protobuf.InvalidProtocolBufferException {
6282      return PARSER.parseFrom(data);
6283    }
6284    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
6285        com.google.protobuf.ByteString data,
6286        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6287        throws com.google.protobuf.InvalidProtocolBufferException {
6288      return PARSER.parseFrom(data, extensionRegistry);
6289    }
6290    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(byte[] data)
6291        throws com.google.protobuf.InvalidProtocolBufferException {
6292      return PARSER.parseFrom(data);
6293    }
6294    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
6295        byte[] data,
6296        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6297        throws com.google.protobuf.InvalidProtocolBufferException {
6298      return PARSER.parseFrom(data, extensionRegistry);
6299    }
6300    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(java.io.InputStream input)
6301        throws java.io.IOException {
6302      return PARSER.parseFrom(input);
6303    }
6304    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
6305        java.io.InputStream input,
6306        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6307        throws java.io.IOException {
6308      return PARSER.parseFrom(input, extensionRegistry);
6309    }
6310    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseDelimitedFrom(java.io.InputStream input)
6311        throws java.io.IOException {
6312      return PARSER.parseDelimitedFrom(input);
6313    }
6314    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseDelimitedFrom(
6315        java.io.InputStream input,
6316        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6317        throws java.io.IOException {
6318      return PARSER.parseDelimitedFrom(input, extensionRegistry);
6319    }
6320    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
6321        com.google.protobuf.CodedInputStream input)
6322        throws java.io.IOException {
6323      return PARSER.parseFrom(input);
6324    }
6325    public static org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parseFrom(
6326        com.google.protobuf.CodedInputStream input,
6327        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6328        throws java.io.IOException {
6329      return PARSER.parseFrom(input, extensionRegistry);
6330    }
6331
6332    public static Builder newBuilder() { return Builder.create(); }
6333    public Builder newBuilderForType() { return newBuilder(); }
6334    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto prototype) {
6335      return newBuilder().mergeFrom(prototype);
6336    }
6337    public Builder toBuilder() { return newBuilder(this); }
6338
6339    @java.lang.Override
6340    protected Builder newBuilderForType(
6341        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6342      Builder builder = new Builder(parent);
6343      return builder;
6344    }
6345    /**
6346     * Protobuf type {@code hadoop.common.AddResponseProto}
6347     */
6348    public static final class Builder extends
6349        com.google.protobuf.GeneratedMessage.Builder<Builder>
6350       implements org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProtoOrBuilder {
6351      public static final com.google.protobuf.Descriptors.Descriptor
6352          getDescriptor() {
6353        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_descriptor;
6354      }
6355
6356      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6357          internalGetFieldAccessorTable() {
6358        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_fieldAccessorTable
6359            .ensureFieldAccessorsInitialized(
6360                org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.Builder.class);
6361      }
6362
6363      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.newBuilder()
6364      private Builder() {
6365        maybeForceBuilderInitialization();
6366      }
6367
6368      private Builder(
6369          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6370        super(parent);
6371        maybeForceBuilderInitialization();
6372      }
6373      private void maybeForceBuilderInitialization() {
6374        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6375        }
6376      }
6377      private static Builder create() {
6378        return new Builder();
6379      }
6380
6381      public Builder clear() {
6382        super.clear();
6383        result_ = 0;
6384        bitField0_ = (bitField0_ & ~0x00000001);
6385        return this;
6386      }
6387
6388      public Builder clone() {
6389        return create().mergeFrom(buildPartial());
6390      }
6391
6392      public com.google.protobuf.Descriptors.Descriptor
6393          getDescriptorForType() {
6394        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AddResponseProto_descriptor;
6395      }
6396
6397      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto getDefaultInstanceForType() {
6398        return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
6399      }
6400
6401      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto build() {
6402        org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto result = buildPartial();
6403        if (!result.isInitialized()) {
6404          throw newUninitializedMessageException(result);
6405        }
6406        return result;
6407      }
6408
6409      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto buildPartial() {
6410        org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto(this);
6411        int from_bitField0_ = bitField0_;
6412        int to_bitField0_ = 0;
6413        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6414          to_bitField0_ |= 0x00000001;
6415        }
6416        result.result_ = result_;
6417        result.bitField0_ = to_bitField0_;
6418        onBuilt();
6419        return result;
6420      }
6421
6422      public Builder mergeFrom(com.google.protobuf.Message other) {
6423        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) {
6424          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto)other);
6425        } else {
6426          super.mergeFrom(other);
6427          return this;
6428        }
6429      }
6430
6431      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto other) {
6432        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance()) return this;
6433        if (other.hasResult()) {
6434          setResult(other.getResult());
6435        }
6436        this.mergeUnknownFields(other.getUnknownFields());
6437        return this;
6438      }
6439
6440      public final boolean isInitialized() {
6441        if (!hasResult()) {
6442          
6443          return false;
6444        }
6445        return true;
6446      }
6447
6448      public Builder mergeFrom(
6449          com.google.protobuf.CodedInputStream input,
6450          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6451          throws java.io.IOException {
6452        org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto parsedMessage = null;
6453        try {
6454          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6455        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6456          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) e.getUnfinishedMessage();
6457          throw e;
6458        } finally {
6459          if (parsedMessage != null) {
6460            mergeFrom(parsedMessage);
6461          }
6462        }
6463        return this;
6464      }
6465      private int bitField0_;
6466
6467      // required int32 result = 1;
6468      private int result_ ;
6469      /**
6470       * <code>required int32 result = 1;</code>
6471       */
6472      public boolean hasResult() {
6473        return ((bitField0_ & 0x00000001) == 0x00000001);
6474      }
6475      /**
6476       * <code>required int32 result = 1;</code>
6477       */
6478      public int getResult() {
6479        return result_;
6480      }
6481      /**
6482       * <code>required int32 result = 1;</code>
6483       */
6484      public Builder setResult(int value) {
6485        bitField0_ |= 0x00000001;
6486        result_ = value;
6487        onChanged();
6488        return this;
6489      }
6490      /**
6491       * <code>required int32 result = 1;</code>
6492       */
6493      public Builder clearResult() {
6494        bitField0_ = (bitField0_ & ~0x00000001);
6495        result_ = 0;
6496        onChanged();
6497        return this;
6498      }
6499
6500      // @@protoc_insertion_point(builder_scope:hadoop.common.AddResponseProto)
6501    }
6502
6503    static {
6504      defaultInstance = new AddResponseProto(true);
6505      defaultInstance.initFields();
6506    }
6507
6508    // @@protoc_insertion_point(class_scope:hadoop.common.AddResponseProto)
6509  }
6510
6511  public interface ExchangeRequestProtoOrBuilder
6512      extends com.google.protobuf.MessageOrBuilder {
6513
6514    // repeated int32 values = 1;
6515    /**
6516     * <code>repeated int32 values = 1;</code>
6517     */
6518    java.util.List<java.lang.Integer> getValuesList();
6519    /**
6520     * <code>repeated int32 values = 1;</code>
6521     */
6522    int getValuesCount();
6523    /**
6524     * <code>repeated int32 values = 1;</code>
6525     */
6526    int getValues(int index);
6527  }
6528  /**
6529   * Protobuf type {@code hadoop.common.ExchangeRequestProto}
6530   */
6531  public static final class ExchangeRequestProto extends
6532      com.google.protobuf.GeneratedMessage
6533      implements ExchangeRequestProtoOrBuilder {
6534    // Use ExchangeRequestProto.newBuilder() to construct.
6535    private ExchangeRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6536      super(builder);
6537      this.unknownFields = builder.getUnknownFields();
6538    }
6539    private ExchangeRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6540
6541    private static final ExchangeRequestProto defaultInstance;
6542    public static ExchangeRequestProto getDefaultInstance() {
6543      return defaultInstance;
6544    }
6545
6546    public ExchangeRequestProto getDefaultInstanceForType() {
6547      return defaultInstance;
6548    }
6549
6550    private final com.google.protobuf.UnknownFieldSet unknownFields;
6551    @java.lang.Override
6552    public final com.google.protobuf.UnknownFieldSet
6553        getUnknownFields() {
6554      return this.unknownFields;
6555    }
6556    private ExchangeRequestProto(
6557        com.google.protobuf.CodedInputStream input,
6558        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6559        throws com.google.protobuf.InvalidProtocolBufferException {
6560      initFields();
6561      int mutable_bitField0_ = 0;
6562      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6563          com.google.protobuf.UnknownFieldSet.newBuilder();
6564      try {
6565        boolean done = false;
6566        while (!done) {
6567          int tag = input.readTag();
6568          switch (tag) {
6569            case 0:
6570              done = true;
6571              break;
6572            default: {
6573              if (!parseUnknownField(input, unknownFields,
6574                                     extensionRegistry, tag)) {
6575                done = true;
6576              }
6577              break;
6578            }
6579            case 8: {
6580              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6581                values_ = new java.util.ArrayList<java.lang.Integer>();
6582                mutable_bitField0_ |= 0x00000001;
6583              }
6584              values_.add(input.readInt32());
6585              break;
6586            }
6587            case 10: {
6588              int length = input.readRawVarint32();
6589              int limit = input.pushLimit(length);
6590              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
6591                values_ = new java.util.ArrayList<java.lang.Integer>();
6592                mutable_bitField0_ |= 0x00000001;
6593              }
6594              while (input.getBytesUntilLimit() > 0) {
6595                values_.add(input.readInt32());
6596              }
6597              input.popLimit(limit);
6598              break;
6599            }
6600          }
6601        }
6602      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6603        throw e.setUnfinishedMessage(this);
6604      } catch (java.io.IOException e) {
6605        throw new com.google.protobuf.InvalidProtocolBufferException(
6606            e.getMessage()).setUnfinishedMessage(this);
6607      } finally {
6608        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6609          values_ = java.util.Collections.unmodifiableList(values_);
6610        }
6611        this.unknownFields = unknownFields.build();
6612        makeExtensionsImmutable();
6613      }
6614    }
6615    public static final com.google.protobuf.Descriptors.Descriptor
6616        getDescriptor() {
6617      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
6618    }
6619
6620    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6621        internalGetFieldAccessorTable() {
6622      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
6623          .ensureFieldAccessorsInitialized(
6624              org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.Builder.class);
6625    }
6626
6627    public static com.google.protobuf.Parser<ExchangeRequestProto> PARSER =
6628        new com.google.protobuf.AbstractParser<ExchangeRequestProto>() {
6629      public ExchangeRequestProto parsePartialFrom(
6630          com.google.protobuf.CodedInputStream input,
6631          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6632          throws com.google.protobuf.InvalidProtocolBufferException {
6633        return new ExchangeRequestProto(input, extensionRegistry);
6634      }
6635    };
6636
6637    @java.lang.Override
6638    public com.google.protobuf.Parser<ExchangeRequestProto> getParserForType() {
6639      return PARSER;
6640    }
6641
6642    // repeated int32 values = 1;
6643    public static final int VALUES_FIELD_NUMBER = 1;
6644    private java.util.List<java.lang.Integer> values_;
6645    /**
6646     * <code>repeated int32 values = 1;</code>
6647     */
6648    public java.util.List<java.lang.Integer>
6649        getValuesList() {
6650      return values_;
6651    }
6652    /**
6653     * <code>repeated int32 values = 1;</code>
6654     */
6655    public int getValuesCount() {
6656      return values_.size();
6657    }
6658    /**
6659     * <code>repeated int32 values = 1;</code>
6660     */
6661    public int getValues(int index) {
6662      return values_.get(index);
6663    }
6664
6665    private void initFields() {
6666      values_ = java.util.Collections.emptyList();
6667    }
6668    private byte memoizedIsInitialized = -1;
6669    public final boolean isInitialized() {
6670      byte isInitialized = memoizedIsInitialized;
6671      if (isInitialized != -1) return isInitialized == 1;
6672
6673      memoizedIsInitialized = 1;
6674      return true;
6675    }
6676
6677    public void writeTo(com.google.protobuf.CodedOutputStream output)
6678                        throws java.io.IOException {
6679      getSerializedSize();
6680      for (int i = 0; i < values_.size(); i++) {
6681        output.writeInt32(1, values_.get(i));
6682      }
6683      getUnknownFields().writeTo(output);
6684    }
6685
6686    private int memoizedSerializedSize = -1;
6687    public int getSerializedSize() {
6688      int size = memoizedSerializedSize;
6689      if (size != -1) return size;
6690
6691      size = 0;
6692      {
6693        int dataSize = 0;
6694        for (int i = 0; i < values_.size(); i++) {
6695          dataSize += com.google.protobuf.CodedOutputStream
6696            .computeInt32SizeNoTag(values_.get(i));
6697        }
6698        size += dataSize;
6699        size += 1 * getValuesList().size();
6700      }
6701      size += getUnknownFields().getSerializedSize();
6702      memoizedSerializedSize = size;
6703      return size;
6704    }
6705
6706    private static final long serialVersionUID = 0L;
6707    @java.lang.Override
6708    protected java.lang.Object writeReplace()
6709        throws java.io.ObjectStreamException {
6710      return super.writeReplace();
6711    }
6712
6713    @java.lang.Override
6714    public boolean equals(final java.lang.Object obj) {
6715      if (obj == this) {
6716       return true;
6717      }
6718      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)) {
6719        return super.equals(obj);
6720      }
6721      org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto) obj;
6722
6723      boolean result = true;
6724      result = result && getValuesList()
6725          .equals(other.getValuesList());
6726      result = result &&
6727          getUnknownFields().equals(other.getUnknownFields());
6728      return result;
6729    }
6730
6731    private int memoizedHashCode = 0;
6732    @java.lang.Override
6733    public int hashCode() {
6734      if (memoizedHashCode != 0) {
6735        return memoizedHashCode;
6736      }
6737      int hash = 41;
6738      hash = (19 * hash) + getDescriptorForType().hashCode();
6739      if (getValuesCount() > 0) {
6740        hash = (37 * hash) + VALUES_FIELD_NUMBER;
6741        hash = (53 * hash) + getValuesList().hashCode();
6742      }
6743      hash = (29 * hash) + getUnknownFields().hashCode();
6744      memoizedHashCode = hash;
6745      return hash;
6746    }
6747
6748    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
6749        com.google.protobuf.ByteString data)
6750        throws com.google.protobuf.InvalidProtocolBufferException {
6751      return PARSER.parseFrom(data);
6752    }
6753    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
6754        com.google.protobuf.ByteString data,
6755        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6756        throws com.google.protobuf.InvalidProtocolBufferException {
6757      return PARSER.parseFrom(data, extensionRegistry);
6758    }
6759    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(byte[] data)
6760        throws com.google.protobuf.InvalidProtocolBufferException {
6761      return PARSER.parseFrom(data);
6762    }
6763    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
6764        byte[] data,
6765        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6766        throws com.google.protobuf.InvalidProtocolBufferException {
6767      return PARSER.parseFrom(data, extensionRegistry);
6768    }
6769    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(java.io.InputStream input)
6770        throws java.io.IOException {
6771      return PARSER.parseFrom(input);
6772    }
6773    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
6774        java.io.InputStream input,
6775        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6776        throws java.io.IOException {
6777      return PARSER.parseFrom(input, extensionRegistry);
6778    }
6779    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseDelimitedFrom(java.io.InputStream input)
6780        throws java.io.IOException {
6781      return PARSER.parseDelimitedFrom(input);
6782    }
6783    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseDelimitedFrom(
6784        java.io.InputStream input,
6785        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6786        throws java.io.IOException {
6787      return PARSER.parseDelimitedFrom(input, extensionRegistry);
6788    }
6789    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
6790        com.google.protobuf.CodedInputStream input)
6791        throws java.io.IOException {
6792      return PARSER.parseFrom(input);
6793    }
6794    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parseFrom(
6795        com.google.protobuf.CodedInputStream input,
6796        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6797        throws java.io.IOException {
6798      return PARSER.parseFrom(input, extensionRegistry);
6799    }
6800
6801    public static Builder newBuilder() { return Builder.create(); }
6802    public Builder newBuilderForType() { return newBuilder(); }
6803    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto prototype) {
6804      return newBuilder().mergeFrom(prototype);
6805    }
6806    public Builder toBuilder() { return newBuilder(this); }
6807
6808    @java.lang.Override
6809    protected Builder newBuilderForType(
6810        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6811      Builder builder = new Builder(parent);
6812      return builder;
6813    }
6814    /**
6815     * Protobuf type {@code hadoop.common.ExchangeRequestProto}
6816     */
6817    public static final class Builder extends
6818        com.google.protobuf.GeneratedMessage.Builder<Builder>
6819       implements org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProtoOrBuilder {
6820      public static final com.google.protobuf.Descriptors.Descriptor
6821          getDescriptor() {
6822        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
6823      }
6824
6825      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6826          internalGetFieldAccessorTable() {
6827        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable
6828            .ensureFieldAccessorsInitialized(
6829                org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.Builder.class);
6830      }
6831
6832      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.newBuilder()
6833      private Builder() {
6834        maybeForceBuilderInitialization();
6835      }
6836
6837      private Builder(
6838          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6839        super(parent);
6840        maybeForceBuilderInitialization();
6841      }
6842      private void maybeForceBuilderInitialization() {
6843        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6844        }
6845      }
6846      private static Builder create() {
6847        return new Builder();
6848      }
6849
6850      public Builder clear() {
6851        super.clear();
6852        values_ = java.util.Collections.emptyList();
6853        bitField0_ = (bitField0_ & ~0x00000001);
6854        return this;
6855      }
6856
6857      public Builder clone() {
6858        return create().mergeFrom(buildPartial());
6859      }
6860
6861      public com.google.protobuf.Descriptors.Descriptor
6862          getDescriptorForType() {
6863        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeRequestProto_descriptor;
6864      }
6865
6866      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto getDefaultInstanceForType() {
6867        return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance();
6868      }
6869
6870      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto build() {
6871        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto result = buildPartial();
6872        if (!result.isInitialized()) {
6873          throw newUninitializedMessageException(result);
6874        }
6875        return result;
6876      }
6877
6878      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto buildPartial() {
6879        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto(this);
6880        int from_bitField0_ = bitField0_;
6881        if (((bitField0_ & 0x00000001) == 0x00000001)) {
6882          values_ = java.util.Collections.unmodifiableList(values_);
6883          bitField0_ = (bitField0_ & ~0x00000001);
6884        }
6885        result.values_ = values_;
6886        onBuilt();
6887        return result;
6888      }
6889
6890      public Builder mergeFrom(com.google.protobuf.Message other) {
6891        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto) {
6892          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)other);
6893        } else {
6894          super.mergeFrom(other);
6895          return this;
6896        }
6897      }
6898
6899      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto other) {
6900        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance()) return this;
6901        if (!other.values_.isEmpty()) {
6902          if (values_.isEmpty()) {
6903            values_ = other.values_;
6904            bitField0_ = (bitField0_ & ~0x00000001);
6905          } else {
6906            ensureValuesIsMutable();
6907            values_.addAll(other.values_);
6908          }
6909          onChanged();
6910        }
6911        this.mergeUnknownFields(other.getUnknownFields());
6912        return this;
6913      }
6914
6915      public final boolean isInitialized() {
6916        return true;
6917      }
6918
6919      public Builder mergeFrom(
6920          com.google.protobuf.CodedInputStream input,
6921          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6922          throws java.io.IOException {
6923        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto parsedMessage = null;
6924        try {
6925          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6926        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6927          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto) e.getUnfinishedMessage();
6928          throw e;
6929        } finally {
6930          if (parsedMessage != null) {
6931            mergeFrom(parsedMessage);
6932          }
6933        }
6934        return this;
6935      }
6936      private int bitField0_;
6937
6938      // repeated int32 values = 1;
6939      private java.util.List<java.lang.Integer> values_ = java.util.Collections.emptyList();
6940      private void ensureValuesIsMutable() {
6941        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
6942          values_ = new java.util.ArrayList<java.lang.Integer>(values_);
6943          bitField0_ |= 0x00000001;
6944         }
6945      }
6946      /**
6947       * <code>repeated int32 values = 1;</code>
6948       */
6949      public java.util.List<java.lang.Integer>
6950          getValuesList() {
6951        return java.util.Collections.unmodifiableList(values_);
6952      }
6953      /**
6954       * <code>repeated int32 values = 1;</code>
6955       */
6956      public int getValuesCount() {
6957        return values_.size();
6958      }
6959      /**
6960       * <code>repeated int32 values = 1;</code>
6961       */
6962      public int getValues(int index) {
6963        return values_.get(index);
6964      }
6965      /**
6966       * <code>repeated int32 values = 1;</code>
6967       */
6968      public Builder setValues(
6969          int index, int value) {
6970        ensureValuesIsMutable();
6971        values_.set(index, value);
6972        onChanged();
6973        return this;
6974      }
6975      /**
6976       * <code>repeated int32 values = 1;</code>
6977       */
6978      public Builder addValues(int value) {
6979        ensureValuesIsMutable();
6980        values_.add(value);
6981        onChanged();
6982        return this;
6983      }
6984      /**
6985       * <code>repeated int32 values = 1;</code>
6986       */
6987      public Builder addAllValues(
6988          java.lang.Iterable<? extends java.lang.Integer> values) {
6989        ensureValuesIsMutable();
6990        super.addAll(values, values_);
6991        onChanged();
6992        return this;
6993      }
6994      /**
6995       * <code>repeated int32 values = 1;</code>
6996       */
6997      public Builder clearValues() {
6998        values_ = java.util.Collections.emptyList();
6999        bitField0_ = (bitField0_ & ~0x00000001);
7000        onChanged();
7001        return this;
7002      }
7003
7004      // @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeRequestProto)
7005    }
7006
7007    static {
7008      defaultInstance = new ExchangeRequestProto(true);
7009      defaultInstance.initFields();
7010    }
7011
7012    // @@protoc_insertion_point(class_scope:hadoop.common.ExchangeRequestProto)
7013  }
7014
7015  public interface ExchangeResponseProtoOrBuilder
7016      extends com.google.protobuf.MessageOrBuilder {
7017
7018    // repeated int32 values = 1;
7019    /**
7020     * <code>repeated int32 values = 1;</code>
7021     */
7022    java.util.List<java.lang.Integer> getValuesList();
7023    /**
7024     * <code>repeated int32 values = 1;</code>
7025     */
7026    int getValuesCount();
7027    /**
7028     * <code>repeated int32 values = 1;</code>
7029     */
7030    int getValues(int index);
7031  }
7032  /**
7033   * Protobuf type {@code hadoop.common.ExchangeResponseProto}
7034   */
7035  public static final class ExchangeResponseProto extends
7036      com.google.protobuf.GeneratedMessage
7037      implements ExchangeResponseProtoOrBuilder {
7038    // Use ExchangeResponseProto.newBuilder() to construct.
7039    private ExchangeResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7040      super(builder);
7041      this.unknownFields = builder.getUnknownFields();
7042    }
7043    private ExchangeResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7044
7045    private static final ExchangeResponseProto defaultInstance;
7046    public static ExchangeResponseProto getDefaultInstance() {
7047      return defaultInstance;
7048    }
7049
7050    public ExchangeResponseProto getDefaultInstanceForType() {
7051      return defaultInstance;
7052    }
7053
7054    private final com.google.protobuf.UnknownFieldSet unknownFields;
7055    @java.lang.Override
7056    public final com.google.protobuf.UnknownFieldSet
7057        getUnknownFields() {
7058      return this.unknownFields;
7059    }
7060    private ExchangeResponseProto(
7061        com.google.protobuf.CodedInputStream input,
7062        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7063        throws com.google.protobuf.InvalidProtocolBufferException {
7064      initFields();
7065      int mutable_bitField0_ = 0;
7066      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7067          com.google.protobuf.UnknownFieldSet.newBuilder();
7068      try {
7069        boolean done = false;
7070        while (!done) {
7071          int tag = input.readTag();
7072          switch (tag) {
7073            case 0:
7074              done = true;
7075              break;
7076            default: {
7077              if (!parseUnknownField(input, unknownFields,
7078                                     extensionRegistry, tag)) {
7079                done = true;
7080              }
7081              break;
7082            }
7083            case 8: {
7084              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
7085                values_ = new java.util.ArrayList<java.lang.Integer>();
7086                mutable_bitField0_ |= 0x00000001;
7087              }
7088              values_.add(input.readInt32());
7089              break;
7090            }
7091            case 10: {
7092              int length = input.readRawVarint32();
7093              int limit = input.pushLimit(length);
7094              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
7095                values_ = new java.util.ArrayList<java.lang.Integer>();
7096                mutable_bitField0_ |= 0x00000001;
7097              }
7098              while (input.getBytesUntilLimit() > 0) {
7099                values_.add(input.readInt32());
7100              }
7101              input.popLimit(limit);
7102              break;
7103            }
7104          }
7105        }
7106      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7107        throw e.setUnfinishedMessage(this);
7108      } catch (java.io.IOException e) {
7109        throw new com.google.protobuf.InvalidProtocolBufferException(
7110            e.getMessage()).setUnfinishedMessage(this);
7111      } finally {
7112        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
7113          values_ = java.util.Collections.unmodifiableList(values_);
7114        }
7115        this.unknownFields = unknownFields.build();
7116        makeExtensionsImmutable();
7117      }
7118    }
7119    public static final com.google.protobuf.Descriptors.Descriptor
7120        getDescriptor() {
7121      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
7122    }
7123
7124    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7125        internalGetFieldAccessorTable() {
7126      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
7127          .ensureFieldAccessorsInitialized(
7128              org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.Builder.class);
7129    }
7130
7131    public static com.google.protobuf.Parser<ExchangeResponseProto> PARSER =
7132        new com.google.protobuf.AbstractParser<ExchangeResponseProto>() {
7133      public ExchangeResponseProto parsePartialFrom(
7134          com.google.protobuf.CodedInputStream input,
7135          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7136          throws com.google.protobuf.InvalidProtocolBufferException {
7137        return new ExchangeResponseProto(input, extensionRegistry);
7138      }
7139    };
7140
7141    @java.lang.Override
7142    public com.google.protobuf.Parser<ExchangeResponseProto> getParserForType() {
7143      return PARSER;
7144    }
7145
7146    // repeated int32 values = 1;
7147    public static final int VALUES_FIELD_NUMBER = 1;
7148    private java.util.List<java.lang.Integer> values_;
7149    /**
7150     * <code>repeated int32 values = 1;</code>
7151     */
7152    public java.util.List<java.lang.Integer>
7153        getValuesList() {
7154      return values_;
7155    }
7156    /**
7157     * <code>repeated int32 values = 1;</code>
7158     */
7159    public int getValuesCount() {
7160      return values_.size();
7161    }
7162    /**
7163     * <code>repeated int32 values = 1;</code>
7164     */
7165    public int getValues(int index) {
7166      return values_.get(index);
7167    }
7168
7169    private void initFields() {
7170      values_ = java.util.Collections.emptyList();
7171    }
7172    private byte memoizedIsInitialized = -1;
7173    public final boolean isInitialized() {
7174      byte isInitialized = memoizedIsInitialized;
7175      if (isInitialized != -1) return isInitialized == 1;
7176
7177      memoizedIsInitialized = 1;
7178      return true;
7179    }
7180
7181    public void writeTo(com.google.protobuf.CodedOutputStream output)
7182                        throws java.io.IOException {
7183      getSerializedSize();
7184      for (int i = 0; i < values_.size(); i++) {
7185        output.writeInt32(1, values_.get(i));
7186      }
7187      getUnknownFields().writeTo(output);
7188    }
7189
7190    private int memoizedSerializedSize = -1;
7191    public int getSerializedSize() {
7192      int size = memoizedSerializedSize;
7193      if (size != -1) return size;
7194
7195      size = 0;
7196      {
7197        int dataSize = 0;
7198        for (int i = 0; i < values_.size(); i++) {
7199          dataSize += com.google.protobuf.CodedOutputStream
7200            .computeInt32SizeNoTag(values_.get(i));
7201        }
7202        size += dataSize;
7203        size += 1 * getValuesList().size();
7204      }
7205      size += getUnknownFields().getSerializedSize();
7206      memoizedSerializedSize = size;
7207      return size;
7208    }
7209
7210    private static final long serialVersionUID = 0L;
7211    @java.lang.Override
7212    protected java.lang.Object writeReplace()
7213        throws java.io.ObjectStreamException {
7214      return super.writeReplace();
7215    }
7216
7217    @java.lang.Override
7218    public boolean equals(final java.lang.Object obj) {
7219      if (obj == this) {
7220       return true;
7221      }
7222      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto)) {
7223        return super.equals(obj);
7224      }
7225      org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) obj;
7226
7227      boolean result = true;
7228      result = result && getValuesList()
7229          .equals(other.getValuesList());
7230      result = result &&
7231          getUnknownFields().equals(other.getUnknownFields());
7232      return result;
7233    }
7234
7235    private int memoizedHashCode = 0;
7236    @java.lang.Override
7237    public int hashCode() {
7238      if (memoizedHashCode != 0) {
7239        return memoizedHashCode;
7240      }
7241      int hash = 41;
7242      hash = (19 * hash) + getDescriptorForType().hashCode();
7243      if (getValuesCount() > 0) {
7244        hash = (37 * hash) + VALUES_FIELD_NUMBER;
7245        hash = (53 * hash) + getValuesList().hashCode();
7246      }
7247      hash = (29 * hash) + getUnknownFields().hashCode();
7248      memoizedHashCode = hash;
7249      return hash;
7250    }
7251
7252    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
7253        com.google.protobuf.ByteString data)
7254        throws com.google.protobuf.InvalidProtocolBufferException {
7255      return PARSER.parseFrom(data);
7256    }
7257    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
7258        com.google.protobuf.ByteString data,
7259        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7260        throws com.google.protobuf.InvalidProtocolBufferException {
7261      return PARSER.parseFrom(data, extensionRegistry);
7262    }
7263    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(byte[] data)
7264        throws com.google.protobuf.InvalidProtocolBufferException {
7265      return PARSER.parseFrom(data);
7266    }
7267    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
7268        byte[] data,
7269        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7270        throws com.google.protobuf.InvalidProtocolBufferException {
7271      return PARSER.parseFrom(data, extensionRegistry);
7272    }
7273    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(java.io.InputStream input)
7274        throws java.io.IOException {
7275      return PARSER.parseFrom(input);
7276    }
7277    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
7278        java.io.InputStream input,
7279        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7280        throws java.io.IOException {
7281      return PARSER.parseFrom(input, extensionRegistry);
7282    }
7283    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseDelimitedFrom(java.io.InputStream input)
7284        throws java.io.IOException {
7285      return PARSER.parseDelimitedFrom(input);
7286    }
7287    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseDelimitedFrom(
7288        java.io.InputStream input,
7289        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7290        throws java.io.IOException {
7291      return PARSER.parseDelimitedFrom(input, extensionRegistry);
7292    }
7293    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
7294        com.google.protobuf.CodedInputStream input)
7295        throws java.io.IOException {
7296      return PARSER.parseFrom(input);
7297    }
7298    public static org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parseFrom(
7299        com.google.protobuf.CodedInputStream input,
7300        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7301        throws java.io.IOException {
7302      return PARSER.parseFrom(input, extensionRegistry);
7303    }
7304
7305    public static Builder newBuilder() { return Builder.create(); }
7306    public Builder newBuilderForType() { return newBuilder(); }
7307    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto prototype) {
7308      return newBuilder().mergeFrom(prototype);
7309    }
7310    public Builder toBuilder() { return newBuilder(this); }
7311
7312    @java.lang.Override
7313    protected Builder newBuilderForType(
7314        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7315      Builder builder = new Builder(parent);
7316      return builder;
7317    }
7318    /**
7319     * Protobuf type {@code hadoop.common.ExchangeResponseProto}
7320     */
7321    public static final class Builder extends
7322        com.google.protobuf.GeneratedMessage.Builder<Builder>
7323       implements org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProtoOrBuilder {
7324      public static final com.google.protobuf.Descriptors.Descriptor
7325          getDescriptor() {
7326        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
7327      }
7328
7329      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7330          internalGetFieldAccessorTable() {
7331        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable
7332            .ensureFieldAccessorsInitialized(
7333                org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.Builder.class);
7334      }
7335
7336      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.newBuilder()
7337      private Builder() {
7338        maybeForceBuilderInitialization();
7339      }
7340
7341      private Builder(
7342          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7343        super(parent);
7344        maybeForceBuilderInitialization();
7345      }
7346      private void maybeForceBuilderInitialization() {
7347        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7348        }
7349      }
7350      private static Builder create() {
7351        return new Builder();
7352      }
7353
7354      public Builder clear() {
7355        super.clear();
7356        values_ = java.util.Collections.emptyList();
7357        bitField0_ = (bitField0_ & ~0x00000001);
7358        return this;
7359      }
7360
7361      public Builder clone() {
7362        return create().mergeFrom(buildPartial());
7363      }
7364
7365      public com.google.protobuf.Descriptors.Descriptor
7366          getDescriptorForType() {
7367        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_ExchangeResponseProto_descriptor;
7368      }
7369
7370      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto getDefaultInstanceForType() {
7371        return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance();
7372      }
7373
7374      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto build() {
7375        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto result = buildPartial();
7376        if (!result.isInitialized()) {
7377          throw newUninitializedMessageException(result);
7378        }
7379        return result;
7380      }
7381
7382      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto buildPartial() {
7383        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto(this);
7384        int from_bitField0_ = bitField0_;
7385        if (((bitField0_ & 0x00000001) == 0x00000001)) {
7386          values_ = java.util.Collections.unmodifiableList(values_);
7387          bitField0_ = (bitField0_ & ~0x00000001);
7388        }
7389        result.values_ = values_;
7390        onBuilt();
7391        return result;
7392      }
7393
7394      public Builder mergeFrom(com.google.protobuf.Message other) {
7395        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) {
7396          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto)other);
7397        } else {
7398          super.mergeFrom(other);
7399          return this;
7400        }
7401      }
7402
7403      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto other) {
7404        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance()) return this;
7405        if (!other.values_.isEmpty()) {
7406          if (values_.isEmpty()) {
7407            values_ = other.values_;
7408            bitField0_ = (bitField0_ & ~0x00000001);
7409          } else {
7410            ensureValuesIsMutable();
7411            values_.addAll(other.values_);
7412          }
7413          onChanged();
7414        }
7415        this.mergeUnknownFields(other.getUnknownFields());
7416        return this;
7417      }
7418
7419      public final boolean isInitialized() {
7420        return true;
7421      }
7422
7423      public Builder mergeFrom(
7424          com.google.protobuf.CodedInputStream input,
7425          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7426          throws java.io.IOException {
7427        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto parsedMessage = null;
7428        try {
7429          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7430        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7431          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) e.getUnfinishedMessage();
7432          throw e;
7433        } finally {
7434          if (parsedMessage != null) {
7435            mergeFrom(parsedMessage);
7436          }
7437        }
7438        return this;
7439      }
7440      private int bitField0_;
7441
7442      // repeated int32 values = 1;
7443      private java.util.List<java.lang.Integer> values_ = java.util.Collections.emptyList();
7444      private void ensureValuesIsMutable() {
7445        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
7446          values_ = new java.util.ArrayList<java.lang.Integer>(values_);
7447          bitField0_ |= 0x00000001;
7448         }
7449      }
7450      /**
7451       * <code>repeated int32 values = 1;</code>
7452       */
7453      public java.util.List<java.lang.Integer>
7454          getValuesList() {
7455        return java.util.Collections.unmodifiableList(values_);
7456      }
7457      /**
7458       * <code>repeated int32 values = 1;</code>
7459       */
7460      public int getValuesCount() {
7461        return values_.size();
7462      }
7463      /**
7464       * <code>repeated int32 values = 1;</code>
7465       */
7466      public int getValues(int index) {
7467        return values_.get(index);
7468      }
7469      /**
7470       * <code>repeated int32 values = 1;</code>
7471       */
7472      public Builder setValues(
7473          int index, int value) {
7474        ensureValuesIsMutable();
7475        values_.set(index, value);
7476        onChanged();
7477        return this;
7478      }
7479      /**
7480       * <code>repeated int32 values = 1;</code>
7481       */
7482      public Builder addValues(int value) {
7483        ensureValuesIsMutable();
7484        values_.add(value);
7485        onChanged();
7486        return this;
7487      }
7488      /**
7489       * <code>repeated int32 values = 1;</code>
7490       */
7491      public Builder addAllValues(
7492          java.lang.Iterable<? extends java.lang.Integer> values) {
7493        ensureValuesIsMutable();
7494        super.addAll(values, values_);
7495        onChanged();
7496        return this;
7497      }
7498      /**
7499       * <code>repeated int32 values = 1;</code>
7500       */
7501      public Builder clearValues() {
7502        values_ = java.util.Collections.emptyList();
7503        bitField0_ = (bitField0_ & ~0x00000001);
7504        onChanged();
7505        return this;
7506      }
7507
7508      // @@protoc_insertion_point(builder_scope:hadoop.common.ExchangeResponseProto)
7509    }
7510
7511    static {
7512      defaultInstance = new ExchangeResponseProto(true);
7513      defaultInstance.initFields();
7514    }
7515
7516    // @@protoc_insertion_point(class_scope:hadoop.common.ExchangeResponseProto)
7517  }
7518
7519  public interface AuthMethodResponseProtoOrBuilder
7520      extends com.google.protobuf.MessageOrBuilder {
7521
7522    // required int32 code = 1;
7523    /**
7524     * <code>required int32 code = 1;</code>
7525     */
7526    boolean hasCode();
7527    /**
7528     * <code>required int32 code = 1;</code>
7529     */
7530    int getCode();
7531
7532    // required string mechanismName = 2;
7533    /**
7534     * <code>required string mechanismName = 2;</code>
7535     */
7536    boolean hasMechanismName();
7537    /**
7538     * <code>required string mechanismName = 2;</code>
7539     */
7540    java.lang.String getMechanismName();
7541    /**
7542     * <code>required string mechanismName = 2;</code>
7543     */
7544    com.google.protobuf.ByteString
7545        getMechanismNameBytes();
7546  }
7547  /**
7548   * Protobuf type {@code hadoop.common.AuthMethodResponseProto}
7549   */
7550  public static final class AuthMethodResponseProto extends
7551      com.google.protobuf.GeneratedMessage
7552      implements AuthMethodResponseProtoOrBuilder {
7553    // Use AuthMethodResponseProto.newBuilder() to construct.
7554    private AuthMethodResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7555      super(builder);
7556      this.unknownFields = builder.getUnknownFields();
7557    }
7558    private AuthMethodResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7559
7560    private static final AuthMethodResponseProto defaultInstance;
7561    public static AuthMethodResponseProto getDefaultInstance() {
7562      return defaultInstance;
7563    }
7564
7565    public AuthMethodResponseProto getDefaultInstanceForType() {
7566      return defaultInstance;
7567    }
7568
7569    private final com.google.protobuf.UnknownFieldSet unknownFields;
7570    @java.lang.Override
7571    public final com.google.protobuf.UnknownFieldSet
7572        getUnknownFields() {
7573      return this.unknownFields;
7574    }
7575    private AuthMethodResponseProto(
7576        com.google.protobuf.CodedInputStream input,
7577        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7578        throws com.google.protobuf.InvalidProtocolBufferException {
7579      initFields();
7580      int mutable_bitField0_ = 0;
7581      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7582          com.google.protobuf.UnknownFieldSet.newBuilder();
7583      try {
7584        boolean done = false;
7585        while (!done) {
7586          int tag = input.readTag();
7587          switch (tag) {
7588            case 0:
7589              done = true;
7590              break;
7591            default: {
7592              if (!parseUnknownField(input, unknownFields,
7593                                     extensionRegistry, tag)) {
7594                done = true;
7595              }
7596              break;
7597            }
7598            case 8: {
7599              bitField0_ |= 0x00000001;
7600              code_ = input.readInt32();
7601              break;
7602            }
7603            case 18: {
7604              bitField0_ |= 0x00000002;
7605              mechanismName_ = input.readBytes();
7606              break;
7607            }
7608          }
7609        }
7610      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7611        throw e.setUnfinishedMessage(this);
7612      } catch (java.io.IOException e) {
7613        throw new com.google.protobuf.InvalidProtocolBufferException(
7614            e.getMessage()).setUnfinishedMessage(this);
7615      } finally {
7616        this.unknownFields = unknownFields.build();
7617        makeExtensionsImmutable();
7618      }
7619    }
7620    public static final com.google.protobuf.Descriptors.Descriptor
7621        getDescriptor() {
7622      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
7623    }
7624
7625    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7626        internalGetFieldAccessorTable() {
7627      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
7628          .ensureFieldAccessorsInitialized(
7629              org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.Builder.class);
7630    }
7631
7632    public static com.google.protobuf.Parser<AuthMethodResponseProto> PARSER =
7633        new com.google.protobuf.AbstractParser<AuthMethodResponseProto>() {
7634      public AuthMethodResponseProto parsePartialFrom(
7635          com.google.protobuf.CodedInputStream input,
7636          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7637          throws com.google.protobuf.InvalidProtocolBufferException {
7638        return new AuthMethodResponseProto(input, extensionRegistry);
7639      }
7640    };
7641
7642    @java.lang.Override
7643    public com.google.protobuf.Parser<AuthMethodResponseProto> getParserForType() {
7644      return PARSER;
7645    }
7646
7647    private int bitField0_;
7648    // required int32 code = 1;
7649    public static final int CODE_FIELD_NUMBER = 1;
7650    private int code_;
7651    /**
7652     * <code>required int32 code = 1;</code>
7653     */
7654    public boolean hasCode() {
7655      return ((bitField0_ & 0x00000001) == 0x00000001);
7656    }
7657    /**
7658     * <code>required int32 code = 1;</code>
7659     */
7660    public int getCode() {
7661      return code_;
7662    }
7663
7664    // required string mechanismName = 2;
7665    public static final int MECHANISMNAME_FIELD_NUMBER = 2;
7666    private java.lang.Object mechanismName_;
7667    /**
7668     * <code>required string mechanismName = 2;</code>
7669     */
7670    public boolean hasMechanismName() {
7671      return ((bitField0_ & 0x00000002) == 0x00000002);
7672    }
7673    /**
7674     * <code>required string mechanismName = 2;</code>
7675     */
7676    public java.lang.String getMechanismName() {
7677      java.lang.Object ref = mechanismName_;
7678      if (ref instanceof java.lang.String) {
7679        return (java.lang.String) ref;
7680      } else {
7681        com.google.protobuf.ByteString bs = 
7682            (com.google.protobuf.ByteString) ref;
7683        java.lang.String s = bs.toStringUtf8();
7684        if (bs.isValidUtf8()) {
7685          mechanismName_ = s;
7686        }
7687        return s;
7688      }
7689    }
7690    /**
7691     * <code>required string mechanismName = 2;</code>
7692     */
7693    public com.google.protobuf.ByteString
7694        getMechanismNameBytes() {
7695      java.lang.Object ref = mechanismName_;
7696      if (ref instanceof java.lang.String) {
7697        com.google.protobuf.ByteString b = 
7698            com.google.protobuf.ByteString.copyFromUtf8(
7699                (java.lang.String) ref);
7700        mechanismName_ = b;
7701        return b;
7702      } else {
7703        return (com.google.protobuf.ByteString) ref;
7704      }
7705    }
7706
7707    private void initFields() {
7708      code_ = 0;
7709      mechanismName_ = "";
7710    }
7711    private byte memoizedIsInitialized = -1;
7712    public final boolean isInitialized() {
7713      byte isInitialized = memoizedIsInitialized;
7714      if (isInitialized != -1) return isInitialized == 1;
7715
7716      if (!hasCode()) {
7717        memoizedIsInitialized = 0;
7718        return false;
7719      }
7720      if (!hasMechanismName()) {
7721        memoizedIsInitialized = 0;
7722        return false;
7723      }
7724      memoizedIsInitialized = 1;
7725      return true;
7726    }
7727
7728    public void writeTo(com.google.protobuf.CodedOutputStream output)
7729                        throws java.io.IOException {
7730      getSerializedSize();
7731      if (((bitField0_ & 0x00000001) == 0x00000001)) {
7732        output.writeInt32(1, code_);
7733      }
7734      if (((bitField0_ & 0x00000002) == 0x00000002)) {
7735        output.writeBytes(2, getMechanismNameBytes());
7736      }
7737      getUnknownFields().writeTo(output);
7738    }
7739
7740    private int memoizedSerializedSize = -1;
7741    public int getSerializedSize() {
7742      int size = memoizedSerializedSize;
7743      if (size != -1) return size;
7744
7745      size = 0;
7746      if (((bitField0_ & 0x00000001) == 0x00000001)) {
7747        size += com.google.protobuf.CodedOutputStream
7748          .computeInt32Size(1, code_);
7749      }
7750      if (((bitField0_ & 0x00000002) == 0x00000002)) {
7751        size += com.google.protobuf.CodedOutputStream
7752          .computeBytesSize(2, getMechanismNameBytes());
7753      }
7754      size += getUnknownFields().getSerializedSize();
7755      memoizedSerializedSize = size;
7756      return size;
7757    }
7758
7759    private static final long serialVersionUID = 0L;
7760    @java.lang.Override
7761    protected java.lang.Object writeReplace()
7762        throws java.io.ObjectStreamException {
7763      return super.writeReplace();
7764    }
7765
7766    @java.lang.Override
7767    public boolean equals(final java.lang.Object obj) {
7768      if (obj == this) {
7769       return true;
7770      }
7771      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto)) {
7772        return super.equals(obj);
7773      }
7774      org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) obj;
7775
7776      boolean result = true;
7777      result = result && (hasCode() == other.hasCode());
7778      if (hasCode()) {
7779        result = result && (getCode()
7780            == other.getCode());
7781      }
7782      result = result && (hasMechanismName() == other.hasMechanismName());
7783      if (hasMechanismName()) {
7784        result = result && getMechanismName()
7785            .equals(other.getMechanismName());
7786      }
7787      result = result &&
7788          getUnknownFields().equals(other.getUnknownFields());
7789      return result;
7790    }
7791
7792    private int memoizedHashCode = 0;
7793    @java.lang.Override
7794    public int hashCode() {
7795      if (memoizedHashCode != 0) {
7796        return memoizedHashCode;
7797      }
7798      int hash = 41;
7799      hash = (19 * hash) + getDescriptorForType().hashCode();
7800      if (hasCode()) {
7801        hash = (37 * hash) + CODE_FIELD_NUMBER;
7802        hash = (53 * hash) + getCode();
7803      }
7804      if (hasMechanismName()) {
7805        hash = (37 * hash) + MECHANISMNAME_FIELD_NUMBER;
7806        hash = (53 * hash) + getMechanismName().hashCode();
7807      }
7808      hash = (29 * hash) + getUnknownFields().hashCode();
7809      memoizedHashCode = hash;
7810      return hash;
7811    }
7812
7813    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
7814        com.google.protobuf.ByteString data)
7815        throws com.google.protobuf.InvalidProtocolBufferException {
7816      return PARSER.parseFrom(data);
7817    }
7818    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
7819        com.google.protobuf.ByteString data,
7820        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7821        throws com.google.protobuf.InvalidProtocolBufferException {
7822      return PARSER.parseFrom(data, extensionRegistry);
7823    }
7824    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(byte[] data)
7825        throws com.google.protobuf.InvalidProtocolBufferException {
7826      return PARSER.parseFrom(data);
7827    }
7828    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
7829        byte[] data,
7830        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7831        throws com.google.protobuf.InvalidProtocolBufferException {
7832      return PARSER.parseFrom(data, extensionRegistry);
7833    }
7834    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(java.io.InputStream input)
7835        throws java.io.IOException {
7836      return PARSER.parseFrom(input);
7837    }
7838    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
7839        java.io.InputStream input,
7840        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7841        throws java.io.IOException {
7842      return PARSER.parseFrom(input, extensionRegistry);
7843    }
7844    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseDelimitedFrom(java.io.InputStream input)
7845        throws java.io.IOException {
7846      return PARSER.parseDelimitedFrom(input);
7847    }
7848    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseDelimitedFrom(
7849        java.io.InputStream input,
7850        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7851        throws java.io.IOException {
7852      return PARSER.parseDelimitedFrom(input, extensionRegistry);
7853    }
7854    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
7855        com.google.protobuf.CodedInputStream input)
7856        throws java.io.IOException {
7857      return PARSER.parseFrom(input);
7858    }
7859    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parseFrom(
7860        com.google.protobuf.CodedInputStream input,
7861        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7862        throws java.io.IOException {
7863      return PARSER.parseFrom(input, extensionRegistry);
7864    }
7865
7866    public static Builder newBuilder() { return Builder.create(); }
7867    public Builder newBuilderForType() { return newBuilder(); }
7868    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto prototype) {
7869      return newBuilder().mergeFrom(prototype);
7870    }
7871    public Builder toBuilder() { return newBuilder(this); }
7872
7873    @java.lang.Override
7874    protected Builder newBuilderForType(
7875        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7876      Builder builder = new Builder(parent);
7877      return builder;
7878    }
7879    /**
7880     * Protobuf type {@code hadoop.common.AuthMethodResponseProto}
7881     */
7882    public static final class Builder extends
7883        com.google.protobuf.GeneratedMessage.Builder<Builder>
7884       implements org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProtoOrBuilder {
7885      public static final com.google.protobuf.Descriptors.Descriptor
7886          getDescriptor() {
7887        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
7888      }
7889
7890      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7891          internalGetFieldAccessorTable() {
7892        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable
7893            .ensureFieldAccessorsInitialized(
7894                org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.Builder.class);
7895      }
7896
7897      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.newBuilder()
7898      private Builder() {
7899        maybeForceBuilderInitialization();
7900      }
7901
7902      private Builder(
7903          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7904        super(parent);
7905        maybeForceBuilderInitialization();
7906      }
7907      private void maybeForceBuilderInitialization() {
7908        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7909        }
7910      }
7911      private static Builder create() {
7912        return new Builder();
7913      }
7914
7915      public Builder clear() {
7916        super.clear();
7917        code_ = 0;
7918        bitField0_ = (bitField0_ & ~0x00000001);
7919        mechanismName_ = "";
7920        bitField0_ = (bitField0_ & ~0x00000002);
7921        return this;
7922      }
7923
7924      public Builder clone() {
7925        return create().mergeFrom(buildPartial());
7926      }
7927
7928      public com.google.protobuf.Descriptors.Descriptor
7929          getDescriptorForType() {
7930        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
7931      }
7932
7933      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getDefaultInstanceForType() {
7934        return org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance();
7935      }
7936
7937      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto build() {
7938        org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto result = buildPartial();
7939        if (!result.isInitialized()) {
7940          throw newUninitializedMessageException(result);
7941        }
7942        return result;
7943      }
7944
7945      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto buildPartial() {
7946        org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto(this);
7947        int from_bitField0_ = bitField0_;
7948        int to_bitField0_ = 0;
7949        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
7950          to_bitField0_ |= 0x00000001;
7951        }
7952        result.code_ = code_;
7953        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
7954          to_bitField0_ |= 0x00000002;
7955        }
7956        result.mechanismName_ = mechanismName_;
7957        result.bitField0_ = to_bitField0_;
7958        onBuilt();
7959        return result;
7960      }
7961
7962      public Builder mergeFrom(com.google.protobuf.Message other) {
7963        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) {
7964          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto)other);
7965        } else {
7966          super.mergeFrom(other);
7967          return this;
7968        }
7969      }
7970
7971      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto other) {
7972        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance()) return this;
7973        if (other.hasCode()) {
7974          setCode(other.getCode());
7975        }
7976        if (other.hasMechanismName()) {
7977          bitField0_ |= 0x00000002;
7978          mechanismName_ = other.mechanismName_;
7979          onChanged();
7980        }
7981        this.mergeUnknownFields(other.getUnknownFields());
7982        return this;
7983      }
7984
7985      public final boolean isInitialized() {
7986        if (!hasCode()) {
7987          
7988          return false;
7989        }
7990        if (!hasMechanismName()) {
7991          
7992          return false;
7993        }
7994        return true;
7995      }
7996
7997      public Builder mergeFrom(
7998          com.google.protobuf.CodedInputStream input,
7999          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8000          throws java.io.IOException {
8001        org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto parsedMessage = null;
8002        try {
8003          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8004        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8005          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) e.getUnfinishedMessage();
8006          throw e;
8007        } finally {
8008          if (parsedMessage != null) {
8009            mergeFrom(parsedMessage);
8010          }
8011        }
8012        return this;
8013      }
8014      private int bitField0_;
8015
8016      // required int32 code = 1;
8017      private int code_ ;
8018      /**
8019       * <code>required int32 code = 1;</code>
8020       */
8021      public boolean hasCode() {
8022        return ((bitField0_ & 0x00000001) == 0x00000001);
8023      }
8024      /**
8025       * <code>required int32 code = 1;</code>
8026       */
8027      public int getCode() {
8028        return code_;
8029      }
8030      /**
8031       * <code>required int32 code = 1;</code>
8032       */
8033      public Builder setCode(int value) {
8034        bitField0_ |= 0x00000001;
8035        code_ = value;
8036        onChanged();
8037        return this;
8038      }
8039      /**
8040       * <code>required int32 code = 1;</code>
8041       */
8042      public Builder clearCode() {
8043        bitField0_ = (bitField0_ & ~0x00000001);
8044        code_ = 0;
8045        onChanged();
8046        return this;
8047      }
8048
8049      // required string mechanismName = 2;
8050      private java.lang.Object mechanismName_ = "";
8051      /**
8052       * <code>required string mechanismName = 2;</code>
8053       */
8054      public boolean hasMechanismName() {
8055        return ((bitField0_ & 0x00000002) == 0x00000002);
8056      }
8057      /**
8058       * <code>required string mechanismName = 2;</code>
8059       */
8060      public java.lang.String getMechanismName() {
8061        java.lang.Object ref = mechanismName_;
8062        if (!(ref instanceof java.lang.String)) {
8063          java.lang.String s = ((com.google.protobuf.ByteString) ref)
8064              .toStringUtf8();
8065          mechanismName_ = s;
8066          return s;
8067        } else {
8068          return (java.lang.String) ref;
8069        }
8070      }
8071      /**
8072       * <code>required string mechanismName = 2;</code>
8073       */
8074      public com.google.protobuf.ByteString
8075          getMechanismNameBytes() {
8076        java.lang.Object ref = mechanismName_;
8077        if (ref instanceof String) {
8078          com.google.protobuf.ByteString b = 
8079              com.google.protobuf.ByteString.copyFromUtf8(
8080                  (java.lang.String) ref);
8081          mechanismName_ = b;
8082          return b;
8083        } else {
8084          return (com.google.protobuf.ByteString) ref;
8085        }
8086      }
8087      /**
8088       * <code>required string mechanismName = 2;</code>
8089       */
8090      public Builder setMechanismName(
8091          java.lang.String value) {
8092        if (value == null) {
8093    throw new NullPointerException();
8094  }
8095  bitField0_ |= 0x00000002;
8096        mechanismName_ = value;
8097        onChanged();
8098        return this;
8099      }
8100      /**
8101       * <code>required string mechanismName = 2;</code>
8102       */
8103      public Builder clearMechanismName() {
8104        bitField0_ = (bitField0_ & ~0x00000002);
8105        mechanismName_ = getDefaultInstance().getMechanismName();
8106        onChanged();
8107        return this;
8108      }
8109      /**
8110       * <code>required string mechanismName = 2;</code>
8111       */
8112      public Builder setMechanismNameBytes(
8113          com.google.protobuf.ByteString value) {
8114        if (value == null) {
8115    throw new NullPointerException();
8116  }
8117  bitField0_ |= 0x00000002;
8118        mechanismName_ = value;
8119        onChanged();
8120        return this;
8121      }
8122
8123      // @@protoc_insertion_point(builder_scope:hadoop.common.AuthMethodResponseProto)
8124    }
8125
8126    static {
8127      defaultInstance = new AuthMethodResponseProto(true);
8128      defaultInstance.initFields();
8129    }
8130
8131    // @@protoc_insertion_point(class_scope:hadoop.common.AuthMethodResponseProto)
8132  }
8133
8134  public interface AuthUserResponseProtoOrBuilder
8135      extends com.google.protobuf.MessageOrBuilder {
8136
8137    // required string authUser = 1;
8138    /**
8139     * <code>required string authUser = 1;</code>
8140     */
8141    boolean hasAuthUser();
8142    /**
8143     * <code>required string authUser = 1;</code>
8144     */
8145    java.lang.String getAuthUser();
8146    /**
8147     * <code>required string authUser = 1;</code>
8148     */
8149    com.google.protobuf.ByteString
8150        getAuthUserBytes();
8151  }
8152  /**
8153   * Protobuf type {@code hadoop.common.AuthUserResponseProto}
8154   */
8155  public static final class AuthUserResponseProto extends
8156      com.google.protobuf.GeneratedMessage
8157      implements AuthUserResponseProtoOrBuilder {
8158    // Use AuthUserResponseProto.newBuilder() to construct.
8159    private AuthUserResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8160      super(builder);
8161      this.unknownFields = builder.getUnknownFields();
8162    }
8163    private AuthUserResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8164
8165    private static final AuthUserResponseProto defaultInstance;
8166    public static AuthUserResponseProto getDefaultInstance() {
8167      return defaultInstance;
8168    }
8169
8170    public AuthUserResponseProto getDefaultInstanceForType() {
8171      return defaultInstance;
8172    }
8173
8174    private final com.google.protobuf.UnknownFieldSet unknownFields;
8175    @java.lang.Override
8176    public final com.google.protobuf.UnknownFieldSet
8177        getUnknownFields() {
8178      return this.unknownFields;
8179    }
8180    private AuthUserResponseProto(
8181        com.google.protobuf.CodedInputStream input,
8182        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8183        throws com.google.protobuf.InvalidProtocolBufferException {
8184      initFields();
8185      int mutable_bitField0_ = 0;
8186      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8187          com.google.protobuf.UnknownFieldSet.newBuilder();
8188      try {
8189        boolean done = false;
8190        while (!done) {
8191          int tag = input.readTag();
8192          switch (tag) {
8193            case 0:
8194              done = true;
8195              break;
8196            default: {
8197              if (!parseUnknownField(input, unknownFields,
8198                                     extensionRegistry, tag)) {
8199                done = true;
8200              }
8201              break;
8202            }
8203            case 10: {
8204              bitField0_ |= 0x00000001;
8205              authUser_ = input.readBytes();
8206              break;
8207            }
8208          }
8209        }
8210      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8211        throw e.setUnfinishedMessage(this);
8212      } catch (java.io.IOException e) {
8213        throw new com.google.protobuf.InvalidProtocolBufferException(
8214            e.getMessage()).setUnfinishedMessage(this);
8215      } finally {
8216        this.unknownFields = unknownFields.build();
8217        makeExtensionsImmutable();
8218      }
8219    }
8220    public static final com.google.protobuf.Descriptors.Descriptor
8221        getDescriptor() {
8222      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthUserResponseProto_descriptor;
8223    }
8224
8225    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8226        internalGetFieldAccessorTable() {
8227      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthUserResponseProto_fieldAccessorTable
8228          .ensureFieldAccessorsInitialized(
8229              org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.Builder.class);
8230    }
8231
8232    public static com.google.protobuf.Parser<AuthUserResponseProto> PARSER =
8233        new com.google.protobuf.AbstractParser<AuthUserResponseProto>() {
8234      public AuthUserResponseProto parsePartialFrom(
8235          com.google.protobuf.CodedInputStream input,
8236          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8237          throws com.google.protobuf.InvalidProtocolBufferException {
8238        return new AuthUserResponseProto(input, extensionRegistry);
8239      }
8240    };
8241
8242    @java.lang.Override
8243    public com.google.protobuf.Parser<AuthUserResponseProto> getParserForType() {
8244      return PARSER;
8245    }
8246
8247    private int bitField0_;
8248    // required string authUser = 1;
8249    public static final int AUTHUSER_FIELD_NUMBER = 1;
8250    private java.lang.Object authUser_;
8251    /**
8252     * <code>required string authUser = 1;</code>
8253     */
8254    public boolean hasAuthUser() {
8255      return ((bitField0_ & 0x00000001) == 0x00000001);
8256    }
8257    /**
8258     * <code>required string authUser = 1;</code>
8259     */
8260    public java.lang.String getAuthUser() {
8261      java.lang.Object ref = authUser_;
8262      if (ref instanceof java.lang.String) {
8263        return (java.lang.String) ref;
8264      } else {
8265        com.google.protobuf.ByteString bs = 
8266            (com.google.protobuf.ByteString) ref;
8267        java.lang.String s = bs.toStringUtf8();
8268        if (bs.isValidUtf8()) {
8269          authUser_ = s;
8270        }
8271        return s;
8272      }
8273    }
8274    /**
8275     * <code>required string authUser = 1;</code>
8276     */
8277    public com.google.protobuf.ByteString
8278        getAuthUserBytes() {
8279      java.lang.Object ref = authUser_;
8280      if (ref instanceof java.lang.String) {
8281        com.google.protobuf.ByteString b = 
8282            com.google.protobuf.ByteString.copyFromUtf8(
8283                (java.lang.String) ref);
8284        authUser_ = b;
8285        return b;
8286      } else {
8287        return (com.google.protobuf.ByteString) ref;
8288      }
8289    }
8290
8291    private void initFields() {
8292      authUser_ = "";
8293    }
8294    private byte memoizedIsInitialized = -1;
8295    public final boolean isInitialized() {
8296      byte isInitialized = memoizedIsInitialized;
8297      if (isInitialized != -1) return isInitialized == 1;
8298
8299      if (!hasAuthUser()) {
8300        memoizedIsInitialized = 0;
8301        return false;
8302      }
8303      memoizedIsInitialized = 1;
8304      return true;
8305    }
8306
8307    public void writeTo(com.google.protobuf.CodedOutputStream output)
8308                        throws java.io.IOException {
8309      getSerializedSize();
8310      if (((bitField0_ & 0x00000001) == 0x00000001)) {
8311        output.writeBytes(1, getAuthUserBytes());
8312      }
8313      getUnknownFields().writeTo(output);
8314    }
8315
8316    private int memoizedSerializedSize = -1;
8317    public int getSerializedSize() {
8318      int size = memoizedSerializedSize;
8319      if (size != -1) return size;
8320
8321      size = 0;
8322      if (((bitField0_ & 0x00000001) == 0x00000001)) {
8323        size += com.google.protobuf.CodedOutputStream
8324          .computeBytesSize(1, getAuthUserBytes());
8325      }
8326      size += getUnknownFields().getSerializedSize();
8327      memoizedSerializedSize = size;
8328      return size;
8329    }
8330
8331    private static final long serialVersionUID = 0L;
8332    @java.lang.Override
8333    protected java.lang.Object writeReplace()
8334        throws java.io.ObjectStreamException {
8335      return super.writeReplace();
8336    }
8337
8338    @java.lang.Override
8339    public boolean equals(final java.lang.Object obj) {
8340      if (obj == this) {
8341       return true;
8342      }
8343      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto)) {
8344        return super.equals(obj);
8345      }
8346      org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto) obj;
8347
8348      boolean result = true;
8349      result = result && (hasAuthUser() == other.hasAuthUser());
8350      if (hasAuthUser()) {
8351        result = result && getAuthUser()
8352            .equals(other.getAuthUser());
8353      }
8354      result = result &&
8355          getUnknownFields().equals(other.getUnknownFields());
8356      return result;
8357    }
8358
8359    private int memoizedHashCode = 0;
8360    @java.lang.Override
8361    public int hashCode() {
8362      if (memoizedHashCode != 0) {
8363        return memoizedHashCode;
8364      }
8365      int hash = 41;
8366      hash = (19 * hash) + getDescriptorForType().hashCode();
8367      if (hasAuthUser()) {
8368        hash = (37 * hash) + AUTHUSER_FIELD_NUMBER;
8369        hash = (53 * hash) + getAuthUser().hashCode();
8370      }
8371      hash = (29 * hash) + getUnknownFields().hashCode();
8372      memoizedHashCode = hash;
8373      return hash;
8374    }
8375
8376    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(
8377        com.google.protobuf.ByteString data)
8378        throws com.google.protobuf.InvalidProtocolBufferException {
8379      return PARSER.parseFrom(data);
8380    }
8381    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(
8382        com.google.protobuf.ByteString data,
8383        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8384        throws com.google.protobuf.InvalidProtocolBufferException {
8385      return PARSER.parseFrom(data, extensionRegistry);
8386    }
8387    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(byte[] data)
8388        throws com.google.protobuf.InvalidProtocolBufferException {
8389      return PARSER.parseFrom(data);
8390    }
8391    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(
8392        byte[] data,
8393        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8394        throws com.google.protobuf.InvalidProtocolBufferException {
8395      return PARSER.parseFrom(data, extensionRegistry);
8396    }
8397    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(java.io.InputStream input)
8398        throws java.io.IOException {
8399      return PARSER.parseFrom(input);
8400    }
8401    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(
8402        java.io.InputStream input,
8403        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8404        throws java.io.IOException {
8405      return PARSER.parseFrom(input, extensionRegistry);
8406    }
8407    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseDelimitedFrom(java.io.InputStream input)
8408        throws java.io.IOException {
8409      return PARSER.parseDelimitedFrom(input);
8410    }
8411    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseDelimitedFrom(
8412        java.io.InputStream input,
8413        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8414        throws java.io.IOException {
8415      return PARSER.parseDelimitedFrom(input, extensionRegistry);
8416    }
8417    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(
8418        com.google.protobuf.CodedInputStream input)
8419        throws java.io.IOException {
8420      return PARSER.parseFrom(input);
8421    }
8422    public static org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parseFrom(
8423        com.google.protobuf.CodedInputStream input,
8424        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8425        throws java.io.IOException {
8426      return PARSER.parseFrom(input, extensionRegistry);
8427    }
8428
8429    public static Builder newBuilder() { return Builder.create(); }
8430    public Builder newBuilderForType() { return newBuilder(); }
8431    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto prototype) {
8432      return newBuilder().mergeFrom(prototype);
8433    }
8434    public Builder toBuilder() { return newBuilder(this); }
8435
8436    @java.lang.Override
8437    protected Builder newBuilderForType(
8438        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8439      Builder builder = new Builder(parent);
8440      return builder;
8441    }
8442    /**
8443     * Protobuf type {@code hadoop.common.AuthUserResponseProto}
8444     */
8445    public static final class Builder extends
8446        com.google.protobuf.GeneratedMessage.Builder<Builder>
8447       implements org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProtoOrBuilder {
8448      public static final com.google.protobuf.Descriptors.Descriptor
8449          getDescriptor() {
8450        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthUserResponseProto_descriptor;
8451      }
8452
8453      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8454          internalGetFieldAccessorTable() {
8455        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthUserResponseProto_fieldAccessorTable
8456            .ensureFieldAccessorsInitialized(
8457                org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.Builder.class);
8458      }
8459
8460      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.newBuilder()
8461      private Builder() {
8462        maybeForceBuilderInitialization();
8463      }
8464
8465      private Builder(
8466          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8467        super(parent);
8468        maybeForceBuilderInitialization();
8469      }
8470      private void maybeForceBuilderInitialization() {
8471        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8472        }
8473      }
8474      private static Builder create() {
8475        return new Builder();
8476      }
8477
8478      public Builder clear() {
8479        super.clear();
8480        authUser_ = "";
8481        bitField0_ = (bitField0_ & ~0x00000001);
8482        return this;
8483      }
8484
8485      public Builder clone() {
8486        return create().mergeFrom(buildPartial());
8487      }
8488
8489      public com.google.protobuf.Descriptors.Descriptor
8490          getDescriptorForType() {
8491        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_AuthUserResponseProto_descriptor;
8492      }
8493
8494      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto getDefaultInstanceForType() {
8495        return org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance();
8496      }
8497
8498      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto build() {
8499        org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto result = buildPartial();
8500        if (!result.isInitialized()) {
8501          throw newUninitializedMessageException(result);
8502        }
8503        return result;
8504      }
8505
8506      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto buildPartial() {
8507        org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto(this);
8508        int from_bitField0_ = bitField0_;
8509        int to_bitField0_ = 0;
8510        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8511          to_bitField0_ |= 0x00000001;
8512        }
8513        result.authUser_ = authUser_;
8514        result.bitField0_ = to_bitField0_;
8515        onBuilt();
8516        return result;
8517      }
8518
8519      public Builder mergeFrom(com.google.protobuf.Message other) {
8520        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto) {
8521          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto)other);
8522        } else {
8523          super.mergeFrom(other);
8524          return this;
8525        }
8526      }
8527
8528      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto other) {
8529        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance()) return this;
8530        if (other.hasAuthUser()) {
8531          bitField0_ |= 0x00000001;
8532          authUser_ = other.authUser_;
8533          onChanged();
8534        }
8535        this.mergeUnknownFields(other.getUnknownFields());
8536        return this;
8537      }
8538
8539      public final boolean isInitialized() {
8540        if (!hasAuthUser()) {
8541          
8542          return false;
8543        }
8544        return true;
8545      }
8546
8547      public Builder mergeFrom(
8548          com.google.protobuf.CodedInputStream input,
8549          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8550          throws java.io.IOException {
8551        org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto parsedMessage = null;
8552        try {
8553          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8554        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8555          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto) e.getUnfinishedMessage();
8556          throw e;
8557        } finally {
8558          if (parsedMessage != null) {
8559            mergeFrom(parsedMessage);
8560          }
8561        }
8562        return this;
8563      }
8564      private int bitField0_;
8565
8566      // required string authUser = 1;
8567      private java.lang.Object authUser_ = "";
8568      /**
8569       * <code>required string authUser = 1;</code>
8570       */
8571      public boolean hasAuthUser() {
8572        return ((bitField0_ & 0x00000001) == 0x00000001);
8573      }
8574      /**
8575       * <code>required string authUser = 1;</code>
8576       */
8577      public java.lang.String getAuthUser() {
8578        java.lang.Object ref = authUser_;
8579        if (!(ref instanceof java.lang.String)) {
8580          java.lang.String s = ((com.google.protobuf.ByteString) ref)
8581              .toStringUtf8();
8582          authUser_ = s;
8583          return s;
8584        } else {
8585          return (java.lang.String) ref;
8586        }
8587      }
8588      /**
8589       * <code>required string authUser = 1;</code>
8590       */
8591      public com.google.protobuf.ByteString
8592          getAuthUserBytes() {
8593        java.lang.Object ref = authUser_;
8594        if (ref instanceof String) {
8595          com.google.protobuf.ByteString b = 
8596              com.google.protobuf.ByteString.copyFromUtf8(
8597                  (java.lang.String) ref);
8598          authUser_ = b;
8599          return b;
8600        } else {
8601          return (com.google.protobuf.ByteString) ref;
8602        }
8603      }
8604      /**
8605       * <code>required string authUser = 1;</code>
8606       */
8607      public Builder setAuthUser(
8608          java.lang.String value) {
8609        if (value == null) {
8610    throw new NullPointerException();
8611  }
8612  bitField0_ |= 0x00000001;
8613        authUser_ = value;
8614        onChanged();
8615        return this;
8616      }
8617      /**
8618       * <code>required string authUser = 1;</code>
8619       */
8620      public Builder clearAuthUser() {
8621        bitField0_ = (bitField0_ & ~0x00000001);
8622        authUser_ = getDefaultInstance().getAuthUser();
8623        onChanged();
8624        return this;
8625      }
8626      /**
8627       * <code>required string authUser = 1;</code>
8628       */
8629      public Builder setAuthUserBytes(
8630          com.google.protobuf.ByteString value) {
8631        if (value == null) {
8632    throw new NullPointerException();
8633  }
8634  bitField0_ |= 0x00000001;
8635        authUser_ = value;
8636        onChanged();
8637        return this;
8638      }
8639
8640      // @@protoc_insertion_point(builder_scope:hadoop.common.AuthUserResponseProto)
8641    }
8642
8643    static {
8644      defaultInstance = new AuthUserResponseProto(true);
8645      defaultInstance.initFields();
8646    }
8647
8648    // @@protoc_insertion_point(class_scope:hadoop.common.AuthUserResponseProto)
8649  }
8650
8651  private static com.google.protobuf.Descriptors.Descriptor
8652    internal_static_hadoop_common_EmptyRequestProto_descriptor;
8653  private static
8654    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8655      internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
8656  private static com.google.protobuf.Descriptors.Descriptor
8657    internal_static_hadoop_common_EmptyResponseProto_descriptor;
8658  private static
8659    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8660      internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
8661  private static com.google.protobuf.Descriptors.Descriptor
8662    internal_static_hadoop_common_EchoRequestProto_descriptor;
8663  private static
8664    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8665      internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
8666  private static com.google.protobuf.Descriptors.Descriptor
8667    internal_static_hadoop_common_EchoResponseProto_descriptor;
8668  private static
8669    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8670      internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
8671  private static com.google.protobuf.Descriptors.Descriptor
8672    internal_static_hadoop_common_OptRequestProto_descriptor;
8673  private static
8674    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8675      internal_static_hadoop_common_OptRequestProto_fieldAccessorTable;
8676  private static com.google.protobuf.Descriptors.Descriptor
8677    internal_static_hadoop_common_OptResponseProto_descriptor;
8678  private static
8679    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8680      internal_static_hadoop_common_OptResponseProto_fieldAccessorTable;
8681  private static com.google.protobuf.Descriptors.Descriptor
8682    internal_static_hadoop_common_SleepRequestProto_descriptor;
8683  private static
8684    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8685      internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable;
8686  private static com.google.protobuf.Descriptors.Descriptor
8687    internal_static_hadoop_common_SleepResponseProto_descriptor;
8688  private static
8689    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8690      internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable;
8691  private static com.google.protobuf.Descriptors.Descriptor
8692    internal_static_hadoop_common_SlowPingRequestProto_descriptor;
8693  private static
8694    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8695      internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable;
8696  private static com.google.protobuf.Descriptors.Descriptor
8697    internal_static_hadoop_common_EchoRequestProto2_descriptor;
8698  private static
8699    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8700      internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable;
8701  private static com.google.protobuf.Descriptors.Descriptor
8702    internal_static_hadoop_common_EchoResponseProto2_descriptor;
8703  private static
8704    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8705      internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable;
8706  private static com.google.protobuf.Descriptors.Descriptor
8707    internal_static_hadoop_common_AddRequestProto_descriptor;
8708  private static
8709    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8710      internal_static_hadoop_common_AddRequestProto_fieldAccessorTable;
8711  private static com.google.protobuf.Descriptors.Descriptor
8712    internal_static_hadoop_common_AddRequestProto2_descriptor;
8713  private static
8714    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8715      internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable;
8716  private static com.google.protobuf.Descriptors.Descriptor
8717    internal_static_hadoop_common_AddResponseProto_descriptor;
8718  private static
8719    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8720      internal_static_hadoop_common_AddResponseProto_fieldAccessorTable;
8721  private static com.google.protobuf.Descriptors.Descriptor
8722    internal_static_hadoop_common_ExchangeRequestProto_descriptor;
8723  private static
8724    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8725      internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable;
8726  private static com.google.protobuf.Descriptors.Descriptor
8727    internal_static_hadoop_common_ExchangeResponseProto_descriptor;
8728  private static
8729    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8730      internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable;
8731  private static com.google.protobuf.Descriptors.Descriptor
8732    internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
8733  private static
8734    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8735      internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable;
8736  private static com.google.protobuf.Descriptors.Descriptor
8737    internal_static_hadoop_common_AuthUserResponseProto_descriptor;
8738  private static
8739    com.google.protobuf.GeneratedMessage.FieldAccessorTable
8740      internal_static_hadoop_common_AuthUserResponseProto_fieldAccessorTable;
8741
8742  public static com.google.protobuf.Descriptors.FileDescriptor
8743      getDescriptor() {
8744    return descriptor;
8745  }
8746  private static com.google.protobuf.Descriptors.FileDescriptor
8747      descriptor;
8748  static {
8749    java.lang.String[] descriptorData = {
8750      "\n\ntest.proto\022\rhadoop.common\"\023\n\021EmptyRequ" +
8751      "estProto\"\024\n\022EmptyResponseProto\"#\n\020EchoRe" +
8752      "questProto\022\017\n\007message\030\001 \002(\t\"$\n\021EchoRespo" +
8753      "nseProto\022\017\n\007message\030\001 \002(\t\"\"\n\017OptRequestP" +
8754      "roto\022\017\n\007message\030\001 \001(\t\"#\n\020OptResponseProt" +
8755      "o\022\017\n\007message\030\001 \001(\t\")\n\021SleepRequestProto\022" +
8756      "\024\n\014milliSeconds\030\001 \002(\005\"\024\n\022SleepResponsePr" +
8757      "oto\"*\n\024SlowPingRequestProto\022\022\n\nshouldSlo" +
8758      "w\030\001 \002(\010\"$\n\021EchoRequestProto2\022\017\n\007message\030" +
8759      "\001 \003(\t\"%\n\022EchoResponseProto2\022\017\n\007message\030\001",
8760      " \003(\t\"1\n\017AddRequestProto\022\016\n\006param1\030\001 \002(\005\022" +
8761      "\016\n\006param2\030\002 \002(\005\"\"\n\020AddRequestProto2\022\016\n\006p" +
8762      "arams\030\001 \003(\005\"\"\n\020AddResponseProto\022\016\n\006resul" +
8763      "t\030\001 \002(\005\"&\n\024ExchangeRequestProto\022\016\n\006value" +
8764      "s\030\001 \003(\005\"\'\n\025ExchangeResponseProto\022\016\n\006valu" +
8765      "es\030\001 \003(\005\">\n\027AuthMethodResponseProto\022\014\n\004c" +
8766      "ode\030\001 \002(\005\022\025\n\rmechanismName\030\002 \002(\t\")\n\025Auth" +
8767      "UserResponseProto\022\020\n\010authUser\030\001 \002(\tB/\n\036o" +
8768      "rg.apache.hadoop.ipc.protobufB\nTestProto" +
8769      "s\240\001\001"
8770    };
8771    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
8772      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
8773        public com.google.protobuf.ExtensionRegistry assignDescriptors(
8774            com.google.protobuf.Descriptors.FileDescriptor root) {
8775          descriptor = root;
8776          internal_static_hadoop_common_EmptyRequestProto_descriptor =
8777            getDescriptor().getMessageTypes().get(0);
8778          internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
8779            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8780              internal_static_hadoop_common_EmptyRequestProto_descriptor,
8781              new java.lang.String[] { });
8782          internal_static_hadoop_common_EmptyResponseProto_descriptor =
8783            getDescriptor().getMessageTypes().get(1);
8784          internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
8785            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8786              internal_static_hadoop_common_EmptyResponseProto_descriptor,
8787              new java.lang.String[] { });
8788          internal_static_hadoop_common_EchoRequestProto_descriptor =
8789            getDescriptor().getMessageTypes().get(2);
8790          internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
8791            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8792              internal_static_hadoop_common_EchoRequestProto_descriptor,
8793              new java.lang.String[] { "Message", });
8794          internal_static_hadoop_common_EchoResponseProto_descriptor =
8795            getDescriptor().getMessageTypes().get(3);
8796          internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
8797            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8798              internal_static_hadoop_common_EchoResponseProto_descriptor,
8799              new java.lang.String[] { "Message", });
8800          internal_static_hadoop_common_OptRequestProto_descriptor =
8801            getDescriptor().getMessageTypes().get(4);
8802          internal_static_hadoop_common_OptRequestProto_fieldAccessorTable = new
8803            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8804              internal_static_hadoop_common_OptRequestProto_descriptor,
8805              new java.lang.String[] { "Message", });
8806          internal_static_hadoop_common_OptResponseProto_descriptor =
8807            getDescriptor().getMessageTypes().get(5);
8808          internal_static_hadoop_common_OptResponseProto_fieldAccessorTable = new
8809            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8810              internal_static_hadoop_common_OptResponseProto_descriptor,
8811              new java.lang.String[] { "Message", });
8812          internal_static_hadoop_common_SleepRequestProto_descriptor =
8813            getDescriptor().getMessageTypes().get(6);
8814          internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable = new
8815            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8816              internal_static_hadoop_common_SleepRequestProto_descriptor,
8817              new java.lang.String[] { "MilliSeconds", });
8818          internal_static_hadoop_common_SleepResponseProto_descriptor =
8819            getDescriptor().getMessageTypes().get(7);
8820          internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable = new
8821            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8822              internal_static_hadoop_common_SleepResponseProto_descriptor,
8823              new java.lang.String[] { });
8824          internal_static_hadoop_common_SlowPingRequestProto_descriptor =
8825            getDescriptor().getMessageTypes().get(8);
8826          internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable = new
8827            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8828              internal_static_hadoop_common_SlowPingRequestProto_descriptor,
8829              new java.lang.String[] { "ShouldSlow", });
8830          internal_static_hadoop_common_EchoRequestProto2_descriptor =
8831            getDescriptor().getMessageTypes().get(9);
8832          internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable = new
8833            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8834              internal_static_hadoop_common_EchoRequestProto2_descriptor,
8835              new java.lang.String[] { "Message", });
8836          internal_static_hadoop_common_EchoResponseProto2_descriptor =
8837            getDescriptor().getMessageTypes().get(10);
8838          internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable = new
8839            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8840              internal_static_hadoop_common_EchoResponseProto2_descriptor,
8841              new java.lang.String[] { "Message", });
8842          internal_static_hadoop_common_AddRequestProto_descriptor =
8843            getDescriptor().getMessageTypes().get(11);
8844          internal_static_hadoop_common_AddRequestProto_fieldAccessorTable = new
8845            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8846              internal_static_hadoop_common_AddRequestProto_descriptor,
8847              new java.lang.String[] { "Param1", "Param2", });
8848          internal_static_hadoop_common_AddRequestProto2_descriptor =
8849            getDescriptor().getMessageTypes().get(12);
8850          internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable = new
8851            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8852              internal_static_hadoop_common_AddRequestProto2_descriptor,
8853              new java.lang.String[] { "Params", });
8854          internal_static_hadoop_common_AddResponseProto_descriptor =
8855            getDescriptor().getMessageTypes().get(13);
8856          internal_static_hadoop_common_AddResponseProto_fieldAccessorTable = new
8857            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8858              internal_static_hadoop_common_AddResponseProto_descriptor,
8859              new java.lang.String[] { "Result", });
8860          internal_static_hadoop_common_ExchangeRequestProto_descriptor =
8861            getDescriptor().getMessageTypes().get(14);
8862          internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable = new
8863            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8864              internal_static_hadoop_common_ExchangeRequestProto_descriptor,
8865              new java.lang.String[] { "Values", });
8866          internal_static_hadoop_common_ExchangeResponseProto_descriptor =
8867            getDescriptor().getMessageTypes().get(15);
8868          internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable = new
8869            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8870              internal_static_hadoop_common_ExchangeResponseProto_descriptor,
8871              new java.lang.String[] { "Values", });
8872          internal_static_hadoop_common_AuthMethodResponseProto_descriptor =
8873            getDescriptor().getMessageTypes().get(16);
8874          internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable = new
8875            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8876              internal_static_hadoop_common_AuthMethodResponseProto_descriptor,
8877              new java.lang.String[] { "Code", "MechanismName", });
8878          internal_static_hadoop_common_AuthUserResponseProto_descriptor =
8879            getDescriptor().getMessageTypes().get(17);
8880          internal_static_hadoop_common_AuthUserResponseProto_fieldAccessorTable = new
8881            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
8882              internal_static_hadoop_common_AuthUserResponseProto_descriptor,
8883              new java.lang.String[] { "AuthUser", });
8884          return null;
8885        }
8886      };
8887    com.google.protobuf.Descriptors.FileDescriptor
8888      .internalBuildGeneratedFileFrom(descriptorData,
8889        new com.google.protobuf.Descriptors.FileDescriptor[] {
8890        }, assigner);
8891  }
8892
8893  // @@protoc_insertion_point(outer_class_scope)
8894}