001// Generated by the protocol buffer compiler.  DO NOT EDIT!
002// source: test.proto
003
004package org.apache.hadoop.ipc.protobuf;
005
006public final class TestProtos {
007  private TestProtos() {}
008  public static void registerAllExtensions(
009      com.google.protobuf.ExtensionRegistry registry) {
010  }
011  public interface EmptyRequestProtoOrBuilder
012      extends com.google.protobuf.MessageOrBuilder {
013  }
014  /**
015   * Protobuf type {@code hadoop.common.EmptyRequestProto}
016   */
017  public static final class EmptyRequestProto extends
018      com.google.protobuf.GeneratedMessage
019      implements EmptyRequestProtoOrBuilder {
020    // Use EmptyRequestProto.newBuilder() to construct.
021    private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
022      super(builder);
023      this.unknownFields = builder.getUnknownFields();
024    }
025    private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
026
027    private static final EmptyRequestProto defaultInstance;
028    public static EmptyRequestProto getDefaultInstance() {
029      return defaultInstance;
030    }
031
032    public EmptyRequestProto getDefaultInstanceForType() {
033      return defaultInstance;
034    }
035
036    private final com.google.protobuf.UnknownFieldSet unknownFields;
037    @java.lang.Override
038    public final com.google.protobuf.UnknownFieldSet
039        getUnknownFields() {
040      return this.unknownFields;
041    }
042    private EmptyRequestProto(
043        com.google.protobuf.CodedInputStream input,
044        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
045        throws com.google.protobuf.InvalidProtocolBufferException {
046      initFields();
047      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
048          com.google.protobuf.UnknownFieldSet.newBuilder();
049      try {
050        boolean done = false;
051        while (!done) {
052          int tag = input.readTag();
053          switch (tag) {
054            case 0:
055              done = true;
056              break;
057            default: {
058              if (!parseUnknownField(input, unknownFields,
059                                     extensionRegistry, tag)) {
060                done = true;
061              }
062              break;
063            }
064          }
065        }
066      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
067        throw e.setUnfinishedMessage(this);
068      } catch (java.io.IOException e) {
069        throw new com.google.protobuf.InvalidProtocolBufferException(
070            e.getMessage()).setUnfinishedMessage(this);
071      } finally {
072        this.unknownFields = unknownFields.build();
073        makeExtensionsImmutable();
074      }
075    }
076    public static final com.google.protobuf.Descriptors.Descriptor
077        getDescriptor() {
078      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
079    }
080
081    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
082        internalGetFieldAccessorTable() {
083      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
084          .ensureFieldAccessorsInitialized(
085              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
086    }
087
088    public static com.google.protobuf.Parser<EmptyRequestProto> PARSER =
089        new com.google.protobuf.AbstractParser<EmptyRequestProto>() {
090      public EmptyRequestProto parsePartialFrom(
091          com.google.protobuf.CodedInputStream input,
092          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
093          throws com.google.protobuf.InvalidProtocolBufferException {
094        return new EmptyRequestProto(input, extensionRegistry);
095      }
096    };
097
098    @java.lang.Override
099    public com.google.protobuf.Parser<EmptyRequestProto> getParserForType() {
100      return PARSER;
101    }
102
103    private void initFields() {
104    }
105    private byte memoizedIsInitialized = -1;
106    public final boolean isInitialized() {
107      byte isInitialized = memoizedIsInitialized;
108      if (isInitialized != -1) return isInitialized == 1;
109
110      memoizedIsInitialized = 1;
111      return true;
112    }
113
114    public void writeTo(com.google.protobuf.CodedOutputStream output)
115                        throws java.io.IOException {
116      getSerializedSize();
117      getUnknownFields().writeTo(output);
118    }
119
120    private int memoizedSerializedSize = -1;
121    public int getSerializedSize() {
122      int size = memoizedSerializedSize;
123      if (size != -1) return size;
124
125      size = 0;
126      size += getUnknownFields().getSerializedSize();
127      memoizedSerializedSize = size;
128      return size;
129    }
130
131    private static final long serialVersionUID = 0L;
132    @java.lang.Override
133    protected java.lang.Object writeReplace()
134        throws java.io.ObjectStreamException {
135      return super.writeReplace();
136    }
137
138    @java.lang.Override
139    public boolean equals(final java.lang.Object obj) {
140      if (obj == this) {
141       return true;
142      }
143      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
144        return super.equals(obj);
145      }
146      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;
147
148      boolean result = true;
149      result = result &&
150          getUnknownFields().equals(other.getUnknownFields());
151      return result;
152    }
153
154    private int memoizedHashCode = 0;
155    @java.lang.Override
156    public int hashCode() {
157      if (memoizedHashCode != 0) {
158        return memoizedHashCode;
159      }
160      int hash = 41;
161      hash = (19 * hash) + getDescriptorForType().hashCode();
162      hash = (29 * hash) + getUnknownFields().hashCode();
163      memoizedHashCode = hash;
164      return hash;
165    }
166
167    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
168        com.google.protobuf.ByteString data)
169        throws com.google.protobuf.InvalidProtocolBufferException {
170      return PARSER.parseFrom(data);
171    }
172    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
173        com.google.protobuf.ByteString data,
174        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
175        throws com.google.protobuf.InvalidProtocolBufferException {
176      return PARSER.parseFrom(data, extensionRegistry);
177    }
178    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
179        throws com.google.protobuf.InvalidProtocolBufferException {
180      return PARSER.parseFrom(data);
181    }
182    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
183        byte[] data,
184        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
185        throws com.google.protobuf.InvalidProtocolBufferException {
186      return PARSER.parseFrom(data, extensionRegistry);
187    }
188    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
189        throws java.io.IOException {
190      return PARSER.parseFrom(input);
191    }
192    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
193        java.io.InputStream input,
194        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
195        throws java.io.IOException {
196      return PARSER.parseFrom(input, extensionRegistry);
197    }
198    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
199        throws java.io.IOException {
200      return PARSER.parseDelimitedFrom(input);
201    }
202    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
203        java.io.InputStream input,
204        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
205        throws java.io.IOException {
206      return PARSER.parseDelimitedFrom(input, extensionRegistry);
207    }
208    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
209        com.google.protobuf.CodedInputStream input)
210        throws java.io.IOException {
211      return PARSER.parseFrom(input);
212    }
213    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
214        com.google.protobuf.CodedInputStream input,
215        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
216        throws java.io.IOException {
217      return PARSER.parseFrom(input, extensionRegistry);
218    }
219
220    public static Builder newBuilder() { return Builder.create(); }
221    public Builder newBuilderForType() { return newBuilder(); }
222    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
223      return newBuilder().mergeFrom(prototype);
224    }
225    public Builder toBuilder() { return newBuilder(this); }
226
227    @java.lang.Override
228    protected Builder newBuilderForType(
229        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
230      Builder builder = new Builder(parent);
231      return builder;
232    }
233    /**
234     * Protobuf type {@code hadoop.common.EmptyRequestProto}
235     */
236    public static final class Builder extends
237        com.google.protobuf.GeneratedMessage.Builder<Builder>
238       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
239      public static final com.google.protobuf.Descriptors.Descriptor
240          getDescriptor() {
241        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
242      }
243
244      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
245          internalGetFieldAccessorTable() {
246        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable
247            .ensureFieldAccessorsInitialized(
248                org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
249      }
250
251      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
252      private Builder() {
253        maybeForceBuilderInitialization();
254      }
255
256      private Builder(
257          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
258        super(parent);
259        maybeForceBuilderInitialization();
260      }
261      private void maybeForceBuilderInitialization() {
262        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
263        }
264      }
265      private static Builder create() {
266        return new Builder();
267      }
268
269      public Builder clear() {
270        super.clear();
271        return this;
272      }
273
274      public Builder clone() {
275        return create().mergeFrom(buildPartial());
276      }
277
278      public com.google.protobuf.Descriptors.Descriptor
279          getDescriptorForType() {
280        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
281      }
282
283      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
284        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
285      }
286
287      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
288        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
289        if (!result.isInitialized()) {
290          throw newUninitializedMessageException(result);
291        }
292        return result;
293      }
294
295      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
296        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
297        onBuilt();
298        return result;
299      }
300
301      public Builder mergeFrom(com.google.protobuf.Message other) {
302        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
303          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
304        } else {
305          super.mergeFrom(other);
306          return this;
307        }
308      }
309
310      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
311        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
312        this.mergeUnknownFields(other.getUnknownFields());
313        return this;
314      }
315
316      public final boolean isInitialized() {
317        return true;
318      }
319
320      public Builder mergeFrom(
321          com.google.protobuf.CodedInputStream input,
322          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
323          throws java.io.IOException {
324        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parsedMessage = null;
325        try {
326          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
327        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
328          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) e.getUnfinishedMessage();
329          throw e;
330        } finally {
331          if (parsedMessage != null) {
332            mergeFrom(parsedMessage);
333          }
334        }
335        return this;
336      }
337
338      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
339    }
340
341    static {
342      defaultInstance = new EmptyRequestProto(true);
343      defaultInstance.initFields();
344    }
345
346    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
347  }
348
349  public interface EmptyResponseProtoOrBuilder
350      extends com.google.protobuf.MessageOrBuilder {
351  }
352  /**
353   * Protobuf type {@code hadoop.common.EmptyResponseProto}
354   */
355  public static final class EmptyResponseProto extends
356      com.google.protobuf.GeneratedMessage
357      implements EmptyResponseProtoOrBuilder {
358    // Use EmptyResponseProto.newBuilder() to construct.
359    private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
360      super(builder);
361      this.unknownFields = builder.getUnknownFields();
362    }
363    private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
364
365    private static final EmptyResponseProto defaultInstance;
366    public static EmptyResponseProto getDefaultInstance() {
367      return defaultInstance;
368    }
369
370    public EmptyResponseProto getDefaultInstanceForType() {
371      return defaultInstance;
372    }
373
374    private final com.google.protobuf.UnknownFieldSet unknownFields;
375    @java.lang.Override
376    public final com.google.protobuf.UnknownFieldSet
377        getUnknownFields() {
378      return this.unknownFields;
379    }
380    private EmptyResponseProto(
381        com.google.protobuf.CodedInputStream input,
382        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
383        throws com.google.protobuf.InvalidProtocolBufferException {
384      initFields();
385      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
386          com.google.protobuf.UnknownFieldSet.newBuilder();
387      try {
388        boolean done = false;
389        while (!done) {
390          int tag = input.readTag();
391          switch (tag) {
392            case 0:
393              done = true;
394              break;
395            default: {
396              if (!parseUnknownField(input, unknownFields,
397                                     extensionRegistry, tag)) {
398                done = true;
399              }
400              break;
401            }
402          }
403        }
404      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
405        throw e.setUnfinishedMessage(this);
406      } catch (java.io.IOException e) {
407        throw new com.google.protobuf.InvalidProtocolBufferException(
408            e.getMessage()).setUnfinishedMessage(this);
409      } finally {
410        this.unknownFields = unknownFields.build();
411        makeExtensionsImmutable();
412      }
413    }
414    public static final com.google.protobuf.Descriptors.Descriptor
415        getDescriptor() {
416      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
417    }
418
419    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
420        internalGetFieldAccessorTable() {
421      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
422          .ensureFieldAccessorsInitialized(
423              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
424    }
425
426    public static com.google.protobuf.Parser<EmptyResponseProto> PARSER =
427        new com.google.protobuf.AbstractParser<EmptyResponseProto>() {
428      public EmptyResponseProto parsePartialFrom(
429          com.google.protobuf.CodedInputStream input,
430          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
431          throws com.google.protobuf.InvalidProtocolBufferException {
432        return new EmptyResponseProto(input, extensionRegistry);
433      }
434    };
435
436    @java.lang.Override
437    public com.google.protobuf.Parser<EmptyResponseProto> getParserForType() {
438      return PARSER;
439    }
440
441    private void initFields() {
442    }
443    private byte memoizedIsInitialized = -1;
444    public final boolean isInitialized() {
445      byte isInitialized = memoizedIsInitialized;
446      if (isInitialized != -1) return isInitialized == 1;
447
448      memoizedIsInitialized = 1;
449      return true;
450    }
451
452    public void writeTo(com.google.protobuf.CodedOutputStream output)
453                        throws java.io.IOException {
454      getSerializedSize();
455      getUnknownFields().writeTo(output);
456    }
457
458    private int memoizedSerializedSize = -1;
459    public int getSerializedSize() {
460      int size = memoizedSerializedSize;
461      if (size != -1) return size;
462
463      size = 0;
464      size += getUnknownFields().getSerializedSize();
465      memoizedSerializedSize = size;
466      return size;
467    }
468
469    private static final long serialVersionUID = 0L;
470    @java.lang.Override
471    protected java.lang.Object writeReplace()
472        throws java.io.ObjectStreamException {
473      return super.writeReplace();
474    }
475
476    @java.lang.Override
477    public boolean equals(final java.lang.Object obj) {
478      if (obj == this) {
479       return true;
480      }
481      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
482        return super.equals(obj);
483      }
484      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;
485
486      boolean result = true;
487      result = result &&
488          getUnknownFields().equals(other.getUnknownFields());
489      return result;
490    }
491
492    private int memoizedHashCode = 0;
493    @java.lang.Override
494    public int hashCode() {
495      if (memoizedHashCode != 0) {
496        return memoizedHashCode;
497      }
498      int hash = 41;
499      hash = (19 * hash) + getDescriptorForType().hashCode();
500      hash = (29 * hash) + getUnknownFields().hashCode();
501      memoizedHashCode = hash;
502      return hash;
503    }
504
505    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
506        com.google.protobuf.ByteString data)
507        throws com.google.protobuf.InvalidProtocolBufferException {
508      return PARSER.parseFrom(data);
509    }
510    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
511        com.google.protobuf.ByteString data,
512        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
513        throws com.google.protobuf.InvalidProtocolBufferException {
514      return PARSER.parseFrom(data, extensionRegistry);
515    }
516    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
517        throws com.google.protobuf.InvalidProtocolBufferException {
518      return PARSER.parseFrom(data);
519    }
520    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
521        byte[] data,
522        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
523        throws com.google.protobuf.InvalidProtocolBufferException {
524      return PARSER.parseFrom(data, extensionRegistry);
525    }
526    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
527        throws java.io.IOException {
528      return PARSER.parseFrom(input);
529    }
530    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
531        java.io.InputStream input,
532        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
533        throws java.io.IOException {
534      return PARSER.parseFrom(input, extensionRegistry);
535    }
536    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
537        throws java.io.IOException {
538      return PARSER.parseDelimitedFrom(input);
539    }
540    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
541        java.io.InputStream input,
542        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
543        throws java.io.IOException {
544      return PARSER.parseDelimitedFrom(input, extensionRegistry);
545    }
546    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
547        com.google.protobuf.CodedInputStream input)
548        throws java.io.IOException {
549      return PARSER.parseFrom(input);
550    }
551    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
552        com.google.protobuf.CodedInputStream input,
553        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
554        throws java.io.IOException {
555      return PARSER.parseFrom(input, extensionRegistry);
556    }
557
558    public static Builder newBuilder() { return Builder.create(); }
559    public Builder newBuilderForType() { return newBuilder(); }
560    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
561      return newBuilder().mergeFrom(prototype);
562    }
563    public Builder toBuilder() { return newBuilder(this); }
564
565    @java.lang.Override
566    protected Builder newBuilderForType(
567        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
568      Builder builder = new Builder(parent);
569      return builder;
570    }
571    /**
572     * Protobuf type {@code hadoop.common.EmptyResponseProto}
573     */
574    public static final class Builder extends
575        com.google.protobuf.GeneratedMessage.Builder<Builder>
576       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
577      public static final com.google.protobuf.Descriptors.Descriptor
578          getDescriptor() {
579        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
580      }
581
582      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
583          internalGetFieldAccessorTable() {
584        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable
585            .ensureFieldAccessorsInitialized(
586                org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
587      }
588
589      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
590      private Builder() {
591        maybeForceBuilderInitialization();
592      }
593
594      private Builder(
595          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
596        super(parent);
597        maybeForceBuilderInitialization();
598      }
599      private void maybeForceBuilderInitialization() {
600        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
601        }
602      }
603      private static Builder create() {
604        return new Builder();
605      }
606
607      public Builder clear() {
608        super.clear();
609        return this;
610      }
611
612      public Builder clone() {
613        return create().mergeFrom(buildPartial());
614      }
615
616      public com.google.protobuf.Descriptors.Descriptor
617          getDescriptorForType() {
618        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
619      }
620
621      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
622        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
623      }
624
625      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
626        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
627        if (!result.isInitialized()) {
628          throw newUninitializedMessageException(result);
629        }
630        return result;
631      }
632
633      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
634        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
635        onBuilt();
636        return result;
637      }
638
639      public Builder mergeFrom(com.google.protobuf.Message other) {
640        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
641          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
642        } else {
643          super.mergeFrom(other);
644          return this;
645        }
646      }
647
648      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
649        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
650        this.mergeUnknownFields(other.getUnknownFields());
651        return this;
652      }
653
654      public final boolean isInitialized() {
655        return true;
656      }
657
658      public Builder mergeFrom(
659          com.google.protobuf.CodedInputStream input,
660          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
661          throws java.io.IOException {
662        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parsedMessage = null;
663        try {
664          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
665        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
666          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) e.getUnfinishedMessage();
667          throw e;
668        } finally {
669          if (parsedMessage != null) {
670            mergeFrom(parsedMessage);
671          }
672        }
673        return this;
674      }
675
676      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
677    }
678
679    static {
680      defaultInstance = new EmptyResponseProto(true);
681      defaultInstance.initFields();
682    }
683
684    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
685  }
686
687  public interface EchoRequestProtoOrBuilder
688      extends com.google.protobuf.MessageOrBuilder {
689
690    // required string message = 1;
691    /**
692     * <code>required string message = 1;</code>
693     */
694    boolean hasMessage();
695    /**
696     * <code>required string message = 1;</code>
697     */
698    java.lang.String getMessage();
699    /**
700     * <code>required string message = 1;</code>
701     */
702    com.google.protobuf.ByteString
703        getMessageBytes();
704  }
705  /**
706   * Protobuf type {@code hadoop.common.EchoRequestProto}
707   */
708  public static final class EchoRequestProto extends
709      com.google.protobuf.GeneratedMessage
710      implements EchoRequestProtoOrBuilder {
711    // Use EchoRequestProto.newBuilder() to construct.
712    private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
713      super(builder);
714      this.unknownFields = builder.getUnknownFields();
715    }
716    private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
717
718    private static final EchoRequestProto defaultInstance;
719    public static EchoRequestProto getDefaultInstance() {
720      return defaultInstance;
721    }
722
723    public EchoRequestProto getDefaultInstanceForType() {
724      return defaultInstance;
725    }
726
727    private final com.google.protobuf.UnknownFieldSet unknownFields;
728    @java.lang.Override
729    public final com.google.protobuf.UnknownFieldSet
730        getUnknownFields() {
731      return this.unknownFields;
732    }
733    private EchoRequestProto(
734        com.google.protobuf.CodedInputStream input,
735        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
736        throws com.google.protobuf.InvalidProtocolBufferException {
737      initFields();
738      int mutable_bitField0_ = 0;
739      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
740          com.google.protobuf.UnknownFieldSet.newBuilder();
741      try {
742        boolean done = false;
743        while (!done) {
744          int tag = input.readTag();
745          switch (tag) {
746            case 0:
747              done = true;
748              break;
749            default: {
750              if (!parseUnknownField(input, unknownFields,
751                                     extensionRegistry, tag)) {
752                done = true;
753              }
754              break;
755            }
756            case 10: {
757              bitField0_ |= 0x00000001;
758              message_ = input.readBytes();
759              break;
760            }
761          }
762        }
763      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
764        throw e.setUnfinishedMessage(this);
765      } catch (java.io.IOException e) {
766        throw new com.google.protobuf.InvalidProtocolBufferException(
767            e.getMessage()).setUnfinishedMessage(this);
768      } finally {
769        this.unknownFields = unknownFields.build();
770        makeExtensionsImmutable();
771      }
772    }
773    public static final com.google.protobuf.Descriptors.Descriptor
774        getDescriptor() {
775      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
776    }
777
778    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
779        internalGetFieldAccessorTable() {
780      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
781          .ensureFieldAccessorsInitialized(
782              org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
783    }
784
785    public static com.google.protobuf.Parser<EchoRequestProto> PARSER =
786        new com.google.protobuf.AbstractParser<EchoRequestProto>() {
787      public EchoRequestProto parsePartialFrom(
788          com.google.protobuf.CodedInputStream input,
789          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
790          throws com.google.protobuf.InvalidProtocolBufferException {
791        return new EchoRequestProto(input, extensionRegistry);
792      }
793    };
794
795    @java.lang.Override
796    public com.google.protobuf.Parser<EchoRequestProto> getParserForType() {
797      return PARSER;
798    }
799
800    private int bitField0_;
801    // required string message = 1;
802    public static final int MESSAGE_FIELD_NUMBER = 1;
803    private java.lang.Object message_;
804    /**
805     * <code>required string message = 1;</code>
806     */
807    public boolean hasMessage() {
808      return ((bitField0_ & 0x00000001) == 0x00000001);
809    }
810    /**
811     * <code>required string message = 1;</code>
812     */
813    public java.lang.String getMessage() {
814      java.lang.Object ref = message_;
815      if (ref instanceof java.lang.String) {
816        return (java.lang.String) ref;
817      } else {
818        com.google.protobuf.ByteString bs = 
819            (com.google.protobuf.ByteString) ref;
820        java.lang.String s = bs.toStringUtf8();
821        if (bs.isValidUtf8()) {
822          message_ = s;
823        }
824        return s;
825      }
826    }
827    /**
828     * <code>required string message = 1;</code>
829     */
830    public com.google.protobuf.ByteString
831        getMessageBytes() {
832      java.lang.Object ref = message_;
833      if (ref instanceof java.lang.String) {
834        com.google.protobuf.ByteString b = 
835            com.google.protobuf.ByteString.copyFromUtf8(
836                (java.lang.String) ref);
837        message_ = b;
838        return b;
839      } else {
840        return (com.google.protobuf.ByteString) ref;
841      }
842    }
843
844    private void initFields() {
845      message_ = "";
846    }
847    private byte memoizedIsInitialized = -1;
848    public final boolean isInitialized() {
849      byte isInitialized = memoizedIsInitialized;
850      if (isInitialized != -1) return isInitialized == 1;
851
852      if (!hasMessage()) {
853        memoizedIsInitialized = 0;
854        return false;
855      }
856      memoizedIsInitialized = 1;
857      return true;
858    }
859
860    public void writeTo(com.google.protobuf.CodedOutputStream output)
861                        throws java.io.IOException {
862      getSerializedSize();
863      if (((bitField0_ & 0x00000001) == 0x00000001)) {
864        output.writeBytes(1, getMessageBytes());
865      }
866      getUnknownFields().writeTo(output);
867    }
868
869    private int memoizedSerializedSize = -1;
870    public int getSerializedSize() {
871      int size = memoizedSerializedSize;
872      if (size != -1) return size;
873
874      size = 0;
875      if (((bitField0_ & 0x00000001) == 0x00000001)) {
876        size += com.google.protobuf.CodedOutputStream
877          .computeBytesSize(1, getMessageBytes());
878      }
879      size += getUnknownFields().getSerializedSize();
880      memoizedSerializedSize = size;
881      return size;
882    }
883
884    private static final long serialVersionUID = 0L;
885    @java.lang.Override
886    protected java.lang.Object writeReplace()
887        throws java.io.ObjectStreamException {
888      return super.writeReplace();
889    }
890
891    @java.lang.Override
892    public boolean equals(final java.lang.Object obj) {
893      if (obj == this) {
894       return true;
895      }
896      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
897        return super.equals(obj);
898      }
899      org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;
900
901      boolean result = true;
902      result = result && (hasMessage() == other.hasMessage());
903      if (hasMessage()) {
904        result = result && getMessage()
905            .equals(other.getMessage());
906      }
907      result = result &&
908          getUnknownFields().equals(other.getUnknownFields());
909      return result;
910    }
911
912    private int memoizedHashCode = 0;
913    @java.lang.Override
914    public int hashCode() {
915      if (memoizedHashCode != 0) {
916        return memoizedHashCode;
917      }
918      int hash = 41;
919      hash = (19 * hash) + getDescriptorForType().hashCode();
920      if (hasMessage()) {
921        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
922        hash = (53 * hash) + getMessage().hashCode();
923      }
924      hash = (29 * hash) + getUnknownFields().hashCode();
925      memoizedHashCode = hash;
926      return hash;
927    }
928
929    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
930        com.google.protobuf.ByteString data)
931        throws com.google.protobuf.InvalidProtocolBufferException {
932      return PARSER.parseFrom(data);
933    }
934    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
935        com.google.protobuf.ByteString data,
936        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
937        throws com.google.protobuf.InvalidProtocolBufferException {
938      return PARSER.parseFrom(data, extensionRegistry);
939    }
940    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
941        throws com.google.protobuf.InvalidProtocolBufferException {
942      return PARSER.parseFrom(data);
943    }
944    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
945        byte[] data,
946        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
947        throws com.google.protobuf.InvalidProtocolBufferException {
948      return PARSER.parseFrom(data, extensionRegistry);
949    }
950    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input)
951        throws java.io.IOException {
952      return PARSER.parseFrom(input);
953    }
954    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
955        java.io.InputStream input,
956        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
957        throws java.io.IOException {
958      return PARSER.parseFrom(input, extensionRegistry);
959    }
960    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
961        throws java.io.IOException {
962      return PARSER.parseDelimitedFrom(input);
963    }
964    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
965        java.io.InputStream input,
966        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
967        throws java.io.IOException {
968      return PARSER.parseDelimitedFrom(input, extensionRegistry);
969    }
970    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
971        com.google.protobuf.CodedInputStream input)
972        throws java.io.IOException {
973      return PARSER.parseFrom(input);
974    }
975    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
976        com.google.protobuf.CodedInputStream input,
977        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
978        throws java.io.IOException {
979      return PARSER.parseFrom(input, extensionRegistry);
980    }
981
982    public static Builder newBuilder() { return Builder.create(); }
983    public Builder newBuilderForType() { return newBuilder(); }
984    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
985      return newBuilder().mergeFrom(prototype);
986    }
987    public Builder toBuilder() { return newBuilder(this); }
988
989    @java.lang.Override
990    protected Builder newBuilderForType(
991        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
992      Builder builder = new Builder(parent);
993      return builder;
994    }
995    /**
996     * Protobuf type {@code hadoop.common.EchoRequestProto}
997     */
998    public static final class Builder extends
999        com.google.protobuf.GeneratedMessage.Builder<Builder>
1000       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
1001      public static final com.google.protobuf.Descriptors.Descriptor
1002          getDescriptor() {
1003        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
1004      }
1005
1006      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1007          internalGetFieldAccessorTable() {
1008        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable
1009            .ensureFieldAccessorsInitialized(
1010                org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
1011      }
1012
1013      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
1014      private Builder() {
1015        maybeForceBuilderInitialization();
1016      }
1017
1018      private Builder(
1019          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1020        super(parent);
1021        maybeForceBuilderInitialization();
1022      }
1023      private void maybeForceBuilderInitialization() {
1024        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1025        }
1026      }
1027      private static Builder create() {
1028        return new Builder();
1029      }
1030
1031      public Builder clear() {
1032        super.clear();
1033        message_ = "";
1034        bitField0_ = (bitField0_ & ~0x00000001);
1035        return this;
1036      }
1037
1038      public Builder clone() {
1039        return create().mergeFrom(buildPartial());
1040      }
1041
1042      public com.google.protobuf.Descriptors.Descriptor
1043          getDescriptorForType() {
1044        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
1045      }
1046
1047      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
1048        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
1049      }
1050
1051      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
1052        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
1053        if (!result.isInitialized()) {
1054          throw newUninitializedMessageException(result);
1055        }
1056        return result;
1057      }
1058
1059      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
1060        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
1061        int from_bitField0_ = bitField0_;
1062        int to_bitField0_ = 0;
1063        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1064          to_bitField0_ |= 0x00000001;
1065        }
1066        result.message_ = message_;
1067        result.bitField0_ = to_bitField0_;
1068        onBuilt();
1069        return result;
1070      }
1071
1072      public Builder mergeFrom(com.google.protobuf.Message other) {
1073        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
1074          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
1075        } else {
1076          super.mergeFrom(other);
1077          return this;
1078        }
1079      }
1080
1081      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
1082        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
1083        if (other.hasMessage()) {
1084          bitField0_ |= 0x00000001;
1085          message_ = other.message_;
1086          onChanged();
1087        }
1088        this.mergeUnknownFields(other.getUnknownFields());
1089        return this;
1090      }
1091
1092      public final boolean isInitialized() {
1093        if (!hasMessage()) {
1094          
1095          return false;
1096        }
1097        return true;
1098      }
1099
1100      public Builder mergeFrom(
1101          com.google.protobuf.CodedInputStream input,
1102          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1103          throws java.io.IOException {
1104        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parsedMessage = null;
1105        try {
1106          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1107        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1108          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) e.getUnfinishedMessage();
1109          throw e;
1110        } finally {
1111          if (parsedMessage != null) {
1112            mergeFrom(parsedMessage);
1113          }
1114        }
1115        return this;
1116      }
1117      private int bitField0_;
1118
1119      // required string message = 1;
1120      private java.lang.Object message_ = "";
1121      /**
1122       * <code>required string message = 1;</code>
1123       */
1124      public boolean hasMessage() {
1125        return ((bitField0_ & 0x00000001) == 0x00000001);
1126      }
1127      /**
1128       * <code>required string message = 1;</code>
1129       */
1130      public java.lang.String getMessage() {
1131        java.lang.Object ref = message_;
1132        if (!(ref instanceof java.lang.String)) {
1133          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1134              .toStringUtf8();
1135          message_ = s;
1136          return s;
1137        } else {
1138          return (java.lang.String) ref;
1139        }
1140      }
1141      /**
1142       * <code>required string message = 1;</code>
1143       */
1144      public com.google.protobuf.ByteString
1145          getMessageBytes() {
1146        java.lang.Object ref = message_;
1147        if (ref instanceof String) {
1148          com.google.protobuf.ByteString b = 
1149              com.google.protobuf.ByteString.copyFromUtf8(
1150                  (java.lang.String) ref);
1151          message_ = b;
1152          return b;
1153        } else {
1154          return (com.google.protobuf.ByteString) ref;
1155        }
1156      }
1157      /**
1158       * <code>required string message = 1;</code>
1159       */
1160      public Builder setMessage(
1161          java.lang.String value) {
1162        if (value == null) {
1163    throw new NullPointerException();
1164  }
1165  bitField0_ |= 0x00000001;
1166        message_ = value;
1167        onChanged();
1168        return this;
1169      }
1170      /**
1171       * <code>required string message = 1;</code>
1172       */
1173      public Builder clearMessage() {
1174        bitField0_ = (bitField0_ & ~0x00000001);
1175        message_ = getDefaultInstance().getMessage();
1176        onChanged();
1177        return this;
1178      }
1179      /**
1180       * <code>required string message = 1;</code>
1181       */
1182      public Builder setMessageBytes(
1183          com.google.protobuf.ByteString value) {
1184        if (value == null) {
1185    throw new NullPointerException();
1186  }
1187  bitField0_ |= 0x00000001;
1188        message_ = value;
1189        onChanged();
1190        return this;
1191      }
1192
1193      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto)
1194    }
1195
1196    static {
1197      defaultInstance = new EchoRequestProto(true);
1198      defaultInstance.initFields();
1199    }
1200
1201    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto)
1202  }
1203
1204  public interface EchoResponseProtoOrBuilder
1205      extends com.google.protobuf.MessageOrBuilder {
1206
1207    // required string message = 1;
1208    /**
1209     * <code>required string message = 1;</code>
1210     */
1211    boolean hasMessage();
1212    /**
1213     * <code>required string message = 1;</code>
1214     */
1215    java.lang.String getMessage();
1216    /**
1217     * <code>required string message = 1;</code>
1218     */
1219    com.google.protobuf.ByteString
1220        getMessageBytes();
1221  }
1222  /**
1223   * Protobuf type {@code hadoop.common.EchoResponseProto}
1224   */
1225  public static final class EchoResponseProto extends
1226      com.google.protobuf.GeneratedMessage
1227      implements EchoResponseProtoOrBuilder {
1228    // Use EchoResponseProto.newBuilder() to construct.
1229    private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1230      super(builder);
1231      this.unknownFields = builder.getUnknownFields();
1232    }
1233    private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1234
1235    private static final EchoResponseProto defaultInstance;
1236    public static EchoResponseProto getDefaultInstance() {
1237      return defaultInstance;
1238    }
1239
1240    public EchoResponseProto getDefaultInstanceForType() {
1241      return defaultInstance;
1242    }
1243
1244    private final com.google.protobuf.UnknownFieldSet unknownFields;
1245    @java.lang.Override
1246    public final com.google.protobuf.UnknownFieldSet
1247        getUnknownFields() {
1248      return this.unknownFields;
1249    }
1250    private EchoResponseProto(
1251        com.google.protobuf.CodedInputStream input,
1252        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1253        throws com.google.protobuf.InvalidProtocolBufferException {
1254      initFields();
1255      int mutable_bitField0_ = 0;
1256      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1257          com.google.protobuf.UnknownFieldSet.newBuilder();
1258      try {
1259        boolean done = false;
1260        while (!done) {
1261          int tag = input.readTag();
1262          switch (tag) {
1263            case 0:
1264              done = true;
1265              break;
1266            default: {
1267              if (!parseUnknownField(input, unknownFields,
1268                                     extensionRegistry, tag)) {
1269                done = true;
1270              }
1271              break;
1272            }
1273            case 10: {
1274              bitField0_ |= 0x00000001;
1275              message_ = input.readBytes();
1276              break;
1277            }
1278          }
1279        }
1280      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1281        throw e.setUnfinishedMessage(this);
1282      } catch (java.io.IOException e) {
1283        throw new com.google.protobuf.InvalidProtocolBufferException(
1284            e.getMessage()).setUnfinishedMessage(this);
1285      } finally {
1286        this.unknownFields = unknownFields.build();
1287        makeExtensionsImmutable();
1288      }
1289    }
1290    public static final com.google.protobuf.Descriptors.Descriptor
1291        getDescriptor() {
1292      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1293    }
1294
1295    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1296        internalGetFieldAccessorTable() {
1297      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
1298          .ensureFieldAccessorsInitialized(
1299              org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
1300    }
1301
1302    public static com.google.protobuf.Parser<EchoResponseProto> PARSER =
1303        new com.google.protobuf.AbstractParser<EchoResponseProto>() {
1304      public EchoResponseProto parsePartialFrom(
1305          com.google.protobuf.CodedInputStream input,
1306          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1307          throws com.google.protobuf.InvalidProtocolBufferException {
1308        return new EchoResponseProto(input, extensionRegistry);
1309      }
1310    };
1311
1312    @java.lang.Override
1313    public com.google.protobuf.Parser<EchoResponseProto> getParserForType() {
1314      return PARSER;
1315    }
1316
1317    private int bitField0_;
1318    // required string message = 1;
1319    public static final int MESSAGE_FIELD_NUMBER = 1;
1320    private java.lang.Object message_;
1321    /**
1322     * <code>required string message = 1;</code>
1323     */
1324    public boolean hasMessage() {
1325      return ((bitField0_ & 0x00000001) == 0x00000001);
1326    }
1327    /**
1328     * <code>required string message = 1;</code>
1329     */
1330    public java.lang.String getMessage() {
1331      java.lang.Object ref = message_;
1332      if (ref instanceof java.lang.String) {
1333        return (java.lang.String) ref;
1334      } else {
1335        com.google.protobuf.ByteString bs = 
1336            (com.google.protobuf.ByteString) ref;
1337        java.lang.String s = bs.toStringUtf8();
1338        if (bs.isValidUtf8()) {
1339          message_ = s;
1340        }
1341        return s;
1342      }
1343    }
1344    /**
1345     * <code>required string message = 1;</code>
1346     */
1347    public com.google.protobuf.ByteString
1348        getMessageBytes() {
1349      java.lang.Object ref = message_;
1350      if (ref instanceof java.lang.String) {
1351        com.google.protobuf.ByteString b = 
1352            com.google.protobuf.ByteString.copyFromUtf8(
1353                (java.lang.String) ref);
1354        message_ = b;
1355        return b;
1356      } else {
1357        return (com.google.protobuf.ByteString) ref;
1358      }
1359    }
1360
1361    private void initFields() {
1362      message_ = "";
1363    }
1364    private byte memoizedIsInitialized = -1;
1365    public final boolean isInitialized() {
1366      byte isInitialized = memoizedIsInitialized;
1367      if (isInitialized != -1) return isInitialized == 1;
1368
1369      if (!hasMessage()) {
1370        memoizedIsInitialized = 0;
1371        return false;
1372      }
1373      memoizedIsInitialized = 1;
1374      return true;
1375    }
1376
1377    public void writeTo(com.google.protobuf.CodedOutputStream output)
1378                        throws java.io.IOException {
1379      getSerializedSize();
1380      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1381        output.writeBytes(1, getMessageBytes());
1382      }
1383      getUnknownFields().writeTo(output);
1384    }
1385
1386    private int memoizedSerializedSize = -1;
1387    public int getSerializedSize() {
1388      int size = memoizedSerializedSize;
1389      if (size != -1) return size;
1390
1391      size = 0;
1392      if (((bitField0_ & 0x00000001) == 0x00000001)) {
1393        size += com.google.protobuf.CodedOutputStream
1394          .computeBytesSize(1, getMessageBytes());
1395      }
1396      size += getUnknownFields().getSerializedSize();
1397      memoizedSerializedSize = size;
1398      return size;
1399    }
1400
1401    private static final long serialVersionUID = 0L;
1402    @java.lang.Override
1403    protected java.lang.Object writeReplace()
1404        throws java.io.ObjectStreamException {
1405      return super.writeReplace();
1406    }
1407
1408    @java.lang.Override
1409    public boolean equals(final java.lang.Object obj) {
1410      if (obj == this) {
1411       return true;
1412      }
1413      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
1414        return super.equals(obj);
1415      }
1416      org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;
1417
1418      boolean result = true;
1419      result = result && (hasMessage() == other.hasMessage());
1420      if (hasMessage()) {
1421        result = result && getMessage()
1422            .equals(other.getMessage());
1423      }
1424      result = result &&
1425          getUnknownFields().equals(other.getUnknownFields());
1426      return result;
1427    }
1428
1429    private int memoizedHashCode = 0;
1430    @java.lang.Override
1431    public int hashCode() {
1432      if (memoizedHashCode != 0) {
1433        return memoizedHashCode;
1434      }
1435      int hash = 41;
1436      hash = (19 * hash) + getDescriptorForType().hashCode();
1437      if (hasMessage()) {
1438        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
1439        hash = (53 * hash) + getMessage().hashCode();
1440      }
1441      hash = (29 * hash) + getUnknownFields().hashCode();
1442      memoizedHashCode = hash;
1443      return hash;
1444    }
1445
1446    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1447        com.google.protobuf.ByteString data)
1448        throws com.google.protobuf.InvalidProtocolBufferException {
1449      return PARSER.parseFrom(data);
1450    }
1451    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1452        com.google.protobuf.ByteString data,
1453        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1454        throws com.google.protobuf.InvalidProtocolBufferException {
1455      return PARSER.parseFrom(data, extensionRegistry);
1456    }
1457    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
1458        throws com.google.protobuf.InvalidProtocolBufferException {
1459      return PARSER.parseFrom(data);
1460    }
1461    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1462        byte[] data,
1463        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1464        throws com.google.protobuf.InvalidProtocolBufferException {
1465      return PARSER.parseFrom(data, extensionRegistry);
1466    }
1467    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
1468        throws java.io.IOException {
1469      return PARSER.parseFrom(input);
1470    }
1471    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1472        java.io.InputStream input,
1473        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1474        throws java.io.IOException {
1475      return PARSER.parseFrom(input, extensionRegistry);
1476    }
1477    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
1478        throws java.io.IOException {
1479      return PARSER.parseDelimitedFrom(input);
1480    }
1481    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
1482        java.io.InputStream input,
1483        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1484        throws java.io.IOException {
1485      return PARSER.parseDelimitedFrom(input, extensionRegistry);
1486    }
1487    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1488        com.google.protobuf.CodedInputStream input)
1489        throws java.io.IOException {
1490      return PARSER.parseFrom(input);
1491    }
1492    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
1493        com.google.protobuf.CodedInputStream input,
1494        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1495        throws java.io.IOException {
1496      return PARSER.parseFrom(input, extensionRegistry);
1497    }
1498
1499    public static Builder newBuilder() { return Builder.create(); }
1500    public Builder newBuilderForType() { return newBuilder(); }
1501    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
1502      return newBuilder().mergeFrom(prototype);
1503    }
1504    public Builder toBuilder() { return newBuilder(this); }
1505
1506    @java.lang.Override
1507    protected Builder newBuilderForType(
1508        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1509      Builder builder = new Builder(parent);
1510      return builder;
1511    }
1512    /**
1513     * Protobuf type {@code hadoop.common.EchoResponseProto}
1514     */
1515    public static final class Builder extends
1516        com.google.protobuf.GeneratedMessage.Builder<Builder>
1517       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
1518      public static final com.google.protobuf.Descriptors.Descriptor
1519          getDescriptor() {
1520        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1521      }
1522
1523      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1524          internalGetFieldAccessorTable() {
1525        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
1526            .ensureFieldAccessorsInitialized(
1527                org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
1528      }
1529
1530      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
1531      private Builder() {
1532        maybeForceBuilderInitialization();
1533      }
1534
1535      private Builder(
1536          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1537        super(parent);
1538        maybeForceBuilderInitialization();
1539      }
1540      private void maybeForceBuilderInitialization() {
1541        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1542        }
1543      }
1544      private static Builder create() {
1545        return new Builder();
1546      }
1547
1548      public Builder clear() {
1549        super.clear();
1550        message_ = "";
1551        bitField0_ = (bitField0_ & ~0x00000001);
1552        return this;
1553      }
1554
1555      public Builder clone() {
1556        return create().mergeFrom(buildPartial());
1557      }
1558
1559      public com.google.protobuf.Descriptors.Descriptor
1560          getDescriptorForType() {
1561        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
1562      }
1563
1564      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
1565        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
1566      }
1567
1568      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
1569        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
1570        if (!result.isInitialized()) {
1571          throw newUninitializedMessageException(result);
1572        }
1573        return result;
1574      }
1575
1576      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
1577        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
1578        int from_bitField0_ = bitField0_;
1579        int to_bitField0_ = 0;
1580        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1581          to_bitField0_ |= 0x00000001;
1582        }
1583        result.message_ = message_;
1584        result.bitField0_ = to_bitField0_;
1585        onBuilt();
1586        return result;
1587      }
1588
1589      public Builder mergeFrom(com.google.protobuf.Message other) {
1590        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
1591          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
1592        } else {
1593          super.mergeFrom(other);
1594          return this;
1595        }
1596      }
1597
1598      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
1599        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
1600        if (other.hasMessage()) {
1601          bitField0_ |= 0x00000001;
1602          message_ = other.message_;
1603          onChanged();
1604        }
1605        this.mergeUnknownFields(other.getUnknownFields());
1606        return this;
1607      }
1608
1609      public final boolean isInitialized() {
1610        if (!hasMessage()) {
1611          
1612          return false;
1613        }
1614        return true;
1615      }
1616
1617      public Builder mergeFrom(
1618          com.google.protobuf.CodedInputStream input,
1619          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1620          throws java.io.IOException {
1621        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parsedMessage = null;
1622        try {
1623          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1624        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1625          parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) e.getUnfinishedMessage();
1626          throw e;
1627        } finally {
1628          if (parsedMessage != null) {
1629            mergeFrom(parsedMessage);
1630          }
1631        }
1632        return this;
1633      }
1634      private int bitField0_;
1635
1636      // required string message = 1;
1637      private java.lang.Object message_ = "";
1638      /**
1639       * <code>required string message = 1;</code>
1640       */
1641      public boolean hasMessage() {
1642        return ((bitField0_ & 0x00000001) == 0x00000001);
1643      }
1644      /**
1645       * <code>required string message = 1;</code>
1646       */
1647      public java.lang.String getMessage() {
1648        java.lang.Object ref = message_;
1649        if (!(ref instanceof java.lang.String)) {
1650          java.lang.String s = ((com.google.protobuf.ByteString) ref)
1651              .toStringUtf8();
1652          message_ = s;
1653          return s;
1654        } else {
1655          return (java.lang.String) ref;
1656        }
1657      }
1658      /**
1659       * <code>required string message = 1;</code>
1660       */
1661      public com.google.protobuf.ByteString
1662          getMessageBytes() {
1663        java.lang.Object ref = message_;
1664        if (ref instanceof String) {
1665          com.google.protobuf.ByteString b = 
1666              com.google.protobuf.ByteString.copyFromUtf8(
1667                  (java.lang.String) ref);
1668          message_ = b;
1669          return b;
1670        } else {
1671          return (com.google.protobuf.ByteString) ref;
1672        }
1673      }
1674      /**
1675       * <code>required string message = 1;</code>
1676       */
1677      public Builder setMessage(
1678          java.lang.String value) {
1679        if (value == null) {
1680    throw new NullPointerException();
1681  }
1682  bitField0_ |= 0x00000001;
1683        message_ = value;
1684        onChanged();
1685        return this;
1686      }
1687      /**
1688       * <code>required string message = 1;</code>
1689       */
1690      public Builder clearMessage() {
1691        bitField0_ = (bitField0_ & ~0x00000001);
1692        message_ = getDefaultInstance().getMessage();
1693        onChanged();
1694        return this;
1695      }
1696      /**
1697       * <code>required string message = 1;</code>
1698       */
1699      public Builder setMessageBytes(
1700          com.google.protobuf.ByteString value) {
1701        if (value == null) {
1702    throw new NullPointerException();
1703  }
1704  bitField0_ |= 0x00000001;
1705        message_ = value;
1706        onChanged();
1707        return this;
1708      }
1709
1710      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
1711    }
1712
1713    static {
1714      defaultInstance = new EchoResponseProto(true);
1715      defaultInstance.initFields();
1716    }
1717
1718    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
1719  }
1720
1721  private static com.google.protobuf.Descriptors.Descriptor
1722    internal_static_hadoop_common_EmptyRequestProto_descriptor;
1723  private static
1724    com.google.protobuf.GeneratedMessage.FieldAccessorTable
1725      internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
1726  private static com.google.protobuf.Descriptors.Descriptor
1727    internal_static_hadoop_common_EmptyResponseProto_descriptor;
1728  private static
1729    com.google.protobuf.GeneratedMessage.FieldAccessorTable
1730      internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
1731  private static com.google.protobuf.Descriptors.Descriptor
1732    internal_static_hadoop_common_EchoRequestProto_descriptor;
1733  private static
1734    com.google.protobuf.GeneratedMessage.FieldAccessorTable
1735      internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
1736  private static com.google.protobuf.Descriptors.Descriptor
1737    internal_static_hadoop_common_EchoResponseProto_descriptor;
1738  private static
1739    com.google.protobuf.GeneratedMessage.FieldAccessorTable
1740      internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
1741
1742  public static com.google.protobuf.Descriptors.FileDescriptor
1743      getDescriptor() {
1744    return descriptor;
1745  }
1746  private static com.google.protobuf.Descriptors.FileDescriptor
1747      descriptor;
1748  static {
1749    java.lang.String[] descriptorData = {
1750      "\n\ntest.proto\022\rhadoop.common\"\023\n\021EmptyRequ" +
1751      "estProto\"\024\n\022EmptyResponseProto\"#\n\020EchoRe" +
1752      "questProto\022\017\n\007message\030\001 \002(\t\"$\n\021EchoRespo" +
1753      "nseProto\022\017\n\007message\030\001 \002(\tB/\n\036org.apache." +
1754      "hadoop.ipc.protobufB\nTestProtos\240\001\001"
1755    };
1756    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
1757      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
1758        public com.google.protobuf.ExtensionRegistry assignDescriptors(
1759            com.google.protobuf.Descriptors.FileDescriptor root) {
1760          descriptor = root;
1761          internal_static_hadoop_common_EmptyRequestProto_descriptor =
1762            getDescriptor().getMessageTypes().get(0);
1763          internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
1764            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1765              internal_static_hadoop_common_EmptyRequestProto_descriptor,
1766              new java.lang.String[] { });
1767          internal_static_hadoop_common_EmptyResponseProto_descriptor =
1768            getDescriptor().getMessageTypes().get(1);
1769          internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
1770            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1771              internal_static_hadoop_common_EmptyResponseProto_descriptor,
1772              new java.lang.String[] { });
1773          internal_static_hadoop_common_EchoRequestProto_descriptor =
1774            getDescriptor().getMessageTypes().get(2);
1775          internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
1776            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1777              internal_static_hadoop_common_EchoRequestProto_descriptor,
1778              new java.lang.String[] { "Message", });
1779          internal_static_hadoop_common_EchoResponseProto_descriptor =
1780            getDescriptor().getMessageTypes().get(3);
1781          internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
1782            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1783              internal_static_hadoop_common_EchoResponseProto_descriptor,
1784              new java.lang.String[] { "Message", });
1785          return null;
1786        }
1787      };
1788    com.google.protobuf.Descriptors.FileDescriptor
1789      .internalBuildGeneratedFileFrom(descriptorData,
1790        new com.google.protobuf.Descriptors.FileDescriptor[] {
1791        }, assigner);
1792  }
1793
1794  // @@protoc_insertion_point(outer_class_scope)
1795}