// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: test.proto

package org.apache.hadoop.ipc.protobuf;

public final class TestProtos {
  private TestProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface EmptyRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class EmptyRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements EmptyRequestProtoOrBuilder {
    // Use EmptyRequestProto.newBuilder() to construct.
    private EmptyRequestProto(Builder builder) {
      super(builder);
    }
    private EmptyRequestProto(boolean noInit) {}

    private static final EmptyRequestProto defaultInstance;
    public static EmptyRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public EmptyRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyRequestProto)
    }

    static {
      defaultInstance = new EmptyRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyRequestProto)
  }

  public interface EmptyResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class EmptyResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements EmptyResponseProtoOrBuilder {
    // Use EmptyResponseProto.newBuilder() to construct.
    private EmptyResponseProto(Builder builder) {
      super(builder);
    }
    private EmptyResponseProto(boolean noInit) {}

    private static final EmptyResponseProto defaultInstance;
    public static EmptyResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public EmptyResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.EmptyResponseProto)
    }

    static {
      defaultInstance = new EmptyResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.EmptyResponseProto)
  }

  public interface EchoRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string message = 1;
    boolean hasMessage();
    String getMessage();
  }
  public static final class EchoRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements EchoRequestProtoOrBuilder {
    // Use EchoRequestProto.newBuilder() to construct.
    private EchoRequestProto(Builder builder) {
      super(builder);
    }
    private EchoRequestProto(boolean noInit) {}

    private static final EchoRequestProto defaultInstance;
    public static EchoRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public EchoRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
    }

    private int bitField0_;
    // required string message = 1;
    public static final int MESSAGE_FIELD_NUMBER = 1;
    private java.lang.Object message_;
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getMessage() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          message_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        message_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      message_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasMessage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getMessageBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getMessageBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;

      boolean result = true;
      result = result && (hasMessage() == other.hasMessage());
      if (hasMessage()) {
        result = result && getMessage()
            .equals(other.getMessage());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMessage()) {
        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
        hash = (53 * hash) + getMessage().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        message_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.message_ = message_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
        if (other.hasMessage()) {
          setMessage(other.getMessage());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasMessage()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              message_ = input.readBytes();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required string message = 1;
      private java.lang.Object message_ = "";
      public boolean hasMessage() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getMessage() {
        java.lang.Object ref = message_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          message_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setMessage(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        message_ = value;
        onChanged();
        return this;
      }
      public Builder clearMessage() {
        bitField0_ = (bitField0_ & ~0x00000001);
        message_ = getDefaultInstance().getMessage();
        onChanged();
        return this;
      }
      void setMessage(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        message_ = value;
        onChanged();
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoRequestProto)
    }

    static {
      defaultInstance = new EchoRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.EchoRequestProto)
  }

  public interface EchoResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string message = 1;
    boolean hasMessage();
    String getMessage();
  }
  public static final class EchoResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements EchoResponseProtoOrBuilder {
    // Use EchoResponseProto.newBuilder() to construct.
    private EchoResponseProto(Builder builder) {
      super(builder);
    }
    private EchoResponseProto(boolean noInit) {}

    private static final EchoResponseProto defaultInstance;
    public static EchoResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public EchoResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
    }

    private int bitField0_;
    // required string message = 1;
    public static final int MESSAGE_FIELD_NUMBER = 1;
    private java.lang.Object message_;
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getMessage() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          message_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        message_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      message_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasMessage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getMessageBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getMessageBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;

      boolean result = true;
      result = result && (hasMessage() == other.hasMessage());
      if (hasMessage()) {
        result = result && getMessage()
            .equals(other.getMessage());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMessage()) {
        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
        hash = (53 * hash) + getMessage().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        message_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.message_ = message_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
        if (other.hasMessage()) {
          setMessage(other.getMessage());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasMessage()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              message_ = input.readBytes();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required string message = 1;
      private java.lang.Object message_ = "";
      public boolean hasMessage() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getMessage() {
        java.lang.Object ref = message_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          message_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setMessage(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        message_ = value;
        onChanged();
        return this;
      }
      public Builder clearMessage() {
        bitField0_ = (bitField0_ & ~0x00000001);
        message_ = getDefaultInstance().getMessage();
        onChanged();
        return this;
      }
      void setMessage(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        message_ = value;
        onChanged();
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.EchoResponseProto)
    }

    static {
      defaultInstance = new EchoResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.EchoResponseProto)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EmptyRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EmptyResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EchoRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_EchoResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\ntest.proto\022\rhadoop.common\"\023\n\021EmptyRequ" +
      "estProto\"\024\n\022EmptyResponseProto\"#\n\020EchoRe" +
      "questProto\022\017\n\007message\030\001 \002(\t\"$\n\021EchoRespo" +
      "nseProto\022\017\n\007message\030\001 \002(\tB/\n\036org.apache." +
      "hadoop.ipc.protobufB\nTestProtos\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_common_EmptyRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EmptyRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class,
              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
          internal_static_hadoop_common_EmptyResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EmptyResponseProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
              org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
          internal_static_hadoop_common_EchoRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EchoRequestProto_descriptor,
              new java.lang.String[] { "Message", },
              org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class,
              org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
          internal_static_hadoop_common_EchoResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_EchoResponseProto_descriptor,
              new java.lang.String[] { "Message", },
              org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
              org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
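
// Hand-written usage sketch, not part of the protoc output above: it shows how the
// generated builder and parseFrom APIs are typically exercised (e.g. from a test in
// this package). The class name below is illustrative only; it assumes it lives in
// the same package as TestProtos so the generated classes are visible.
class TestProtosUsageSketch {
  public static void main(String[] args) throws Exception {
    // Build a request with the required "message" field set via the generated Builder.
    TestProtos.EchoRequestProto request =
        TestProtos.EchoRequestProto.newBuilder()
            .setMessage("hello")
            .build();
    // Serialize to a ByteString and parse it back with the generated parseFrom overload.
    com.google.protobuf.ByteString bytes = request.toByteString();
    TestProtos.EchoRequestProto parsed =
        TestProtos.EchoRequestProto.parseFrom(bytes);
    System.out.println(parsed.getMessage());  // prints "hello"
  }
}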