001 // Generated by the protocol buffer compiler. DO NOT EDIT!
002 // source: ProtocolInfo.proto
003
004 package org.apache.hadoop.ipc.protobuf;
005
006 public final class ProtocolInfoProtos {
// Private constructor: this class is a pure container for the generated
// message types below and is never instantiated.
private ProtocolInfoProtos() {}
// Standard generated hook for registering protobuf extensions.
// ProtocolInfo.proto declares no extensions, so the body is intentionally empty.
public static void registerAllExtensions(
    com.google.protobuf.ExtensionRegistry registry) {
}
/**
 * Read-only accessor interface for {@code hadoop.common.GetProtocolVersionsRequestProto},
 * implemented by both the immutable message and its Builder.
 */
public interface GetProtocolVersionsRequestProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required string protocol = 1;
  /** Returns true if the required {@code protocol} field (field 1) has been set. */
  boolean hasProtocol();
  /** Returns the value of the required {@code protocol} field (field 1). */
  String getProtocol();
}
/**
 * Generated immutable message for {@code hadoop.common.GetProtocolVersionsRequestProto}.
 * Carries a single required string field, {@code protocol} (field 1).
 * Instances are created via {@link #newBuilder()} or one of the static
 * {@code parseFrom} methods; do not edit by hand (protoc-generated).
 */
public static final class GetProtocolVersionsRequestProto extends
    com.google.protobuf.GeneratedMessage
    implements GetProtocolVersionsRequestProtoOrBuilder {
  // Use GetProtocolVersionsRequestProto.newBuilder() to construct.
  private GetProtocolVersionsRequestProto(Builder builder) {
    super(builder);
  }
  // Used only to create the shared default instance without builder overhead;
  // fields are populated afterwards by initFields() in the static initializer.
  private GetProtocolVersionsRequestProto(boolean noInit) {}

  // Shared immutable default instance (all fields unset/default).
  private static final GetProtocolVersionsRequestProto defaultInstance;
  public static GetProtocolVersionsRequestProto getDefaultInstance() {
    return defaultInstance;
  }

  public GetProtocolVersionsRequestProto getDefaultInstanceForType() {
    return defaultInstance;
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable;
  }

  // Bit 0 tracks presence of the required 'protocol' field.
  private int bitField0_;
  // required string protocol = 1;
  public static final int PROTOCOL_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded/encoded lazily on access.
  private java.lang.Object protocol_;
  public boolean hasProtocol() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  public String getProtocol() {
    java.lang.Object ref = protocol_;
    if (ref instanceof String) {
      return (String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      String s = bs.toStringUtf8();
      // Cache the decoded String only when the raw bytes are valid UTF-8,
      // so invalid input is not silently laundered into the cached field.
      if (com.google.protobuf.Internal.isValidUtf8(bs)) {
        protocol_ = s;
      }
      return s;
    }
  }
  // Returns the field as bytes for serialization, caching the UTF-8 encoding.
  private com.google.protobuf.ByteString getProtocolBytes() {
    java.lang.Object ref = protocol_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((String) ref);
      protocol_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private void initFields() {
    protocol_ = "";
  }
  // Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // 'protocol' is required; the message is uninitialized without it.
    if (!hasProtocol()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Called for its side effect of populating memoizedSerializedSize.
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeBytes(1, getProtocolBytes());
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, getProtocolBytes());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto) obj;

    // Field-by-field comparison: presence bit, then value, then unknown fields.
    boolean result = true;
    result = result && (hasProtocol() == other.hasProtocol());
    if (hasProtocol()) {
      result = result && getProtocol()
          .equals(other.getProtocol());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasProtocol()) {
      hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
      hash = (53 * hash) + getProtocol().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    return hash;
  }

  // --- Static parse entry points; all funnel through Builder.buildParsed(),
  // --- which throws InvalidProtocolBufferException on missing required fields.
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
  }
  // Reads a varint length prefix followed by the message; returns null on
  // clean end-of-stream (no message present).
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    Builder builder = newBuilder();
    if (builder.mergeDelimitedFrom(input)) {
      return builder.buildParsed();
    } else {
      return null;
    }
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    Builder builder = newBuilder();
    if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
      return builder.buildParsed();
    } else {
      return null;
    }
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Mutable builder for {@code hadoop.common.GetProtocolVersionsRequestProto}.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
      implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProtoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable;
    }

    // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message-typed fields here, so nothing to eagerly initialize even
      // when field builders are forced on.
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      protocol_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDescriptor();
    }

    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto getDefaultInstanceForType() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance();
    }

    // Builds and verifies required fields; throws UninitializedMessageException
    // (unchecked) if 'protocol' is unset.
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto build() {
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Same as build(), but reports missing required fields as a checked
    // InvalidProtocolBufferException — used by the parseFrom entry points.
    private org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto buildParsed()
        throws com.google.protobuf.InvalidProtocolBufferException {
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(
          result).asInvalidProtocolBufferException();
      }
      return result;
    }

    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto buildPartial() {
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      // Copy presence bit 0 (protocol) from builder state to the message.
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.protocol_ = protocol_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto) {
        return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto other) {
      if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance()) return this;
      if (other.hasProtocol()) {
        setProtocol(other.getProtocol());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasProtocol()) {

        return false;
      }
      return true;
    }

    // Tag-dispatch parse loop: tag 0 = end of input; tag 10 = field 1
    // (length-delimited 'protocol'); anything else goes to unknown fields.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder(
          this.getUnknownFields());
      while (true) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            this.setUnknownFields(unknownFields.build());
            onChanged();
            return this;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            }
            break;
          }
          case 10: {
            bitField0_ |= 0x00000001;
            // Kept as raw bytes; UTF-8 decoding is deferred to getProtocol().
            protocol_ = input.readBytes();
            break;
          }
        }
      }
    }

    private int bitField0_;

    // required string protocol = 1;
    private java.lang.Object protocol_ = "";
    public boolean hasProtocol() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getProtocol() {
      java.lang.Object ref = protocol_;
      if (!(ref instanceof String)) {
        // NOTE: unlike the message accessor, the builder caches the decoded
        // String unconditionally (no UTF-8 validity check).
        String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
        protocol_ = s;
        return s;
      } else {
        return (String) ref;
      }
    }
    public Builder setProtocol(String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      protocol_ = value;
      onChanged();
      return this;
    }
    public Builder clearProtocol() {
      bitField0_ = (bitField0_ & ~0x00000001);
      protocol_ = getDefaultInstance().getProtocol();
      onChanged();
      return this;
    }
    // Package-private fast path used by generated code to set raw bytes.
    void setProtocol(com.google.protobuf.ByteString value) {
      bitField0_ |= 0x00000001;
      protocol_ = value;
      onChanged();
    }

    // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolVersionsRequestProto)
  }

  static {
    defaultInstance = new GetProtocolVersionsRequestProto(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolVersionsRequestProto)
}
426
/**
 * Read-only accessor interface for {@code hadoop.common.ProtocolVersionProto},
 * implemented by both the immutable message and its Builder.
 */
public interface ProtocolVersionProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required string rpcKind = 1;
  /** Returns true if the required {@code rpcKind} field (field 1) has been set. */
  boolean hasRpcKind();
  /** Returns the value of the required {@code rpcKind} field (field 1). */
  String getRpcKind();

  // repeated uint64 versions = 2;
  /** Returns the full list of {@code versions} values (repeated field 2). */
  java.util.List<java.lang.Long> getVersionsList();
  /** Returns the number of {@code versions} entries. */
  int getVersionsCount();
  /** Returns the {@code versions} entry at the given index. */
  long getVersions(int index);
}
439 public static final class ProtocolVersionProto extends
440 com.google.protobuf.GeneratedMessage
441 implements ProtocolVersionProtoOrBuilder {
442 // Use ProtocolVersionProto.newBuilder() to construct.
443 private ProtocolVersionProto(Builder builder) {
444 super(builder);
445 }
446 private ProtocolVersionProto(boolean noInit) {}
447
448 private static final ProtocolVersionProto defaultInstance;
449 public static ProtocolVersionProto getDefaultInstance() {
450 return defaultInstance;
451 }
452
453 public ProtocolVersionProto getDefaultInstanceForType() {
454 return defaultInstance;
455 }
456
457 public static final com.google.protobuf.Descriptors.Descriptor
458 getDescriptor() {
459 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_descriptor;
460 }
461
462 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
463 internalGetFieldAccessorTable() {
464 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable;
465 }
466
467 private int bitField0_;
468 // required string rpcKind = 1;
469 public static final int RPCKIND_FIELD_NUMBER = 1;
470 private java.lang.Object rpcKind_;
471 public boolean hasRpcKind() {
472 return ((bitField0_ & 0x00000001) == 0x00000001);
473 }
474 public String getRpcKind() {
475 java.lang.Object ref = rpcKind_;
476 if (ref instanceof String) {
477 return (String) ref;
478 } else {
479 com.google.protobuf.ByteString bs =
480 (com.google.protobuf.ByteString) ref;
481 String s = bs.toStringUtf8();
482 if (com.google.protobuf.Internal.isValidUtf8(bs)) {
483 rpcKind_ = s;
484 }
485 return s;
486 }
487 }
488 private com.google.protobuf.ByteString getRpcKindBytes() {
489 java.lang.Object ref = rpcKind_;
490 if (ref instanceof String) {
491 com.google.protobuf.ByteString b =
492 com.google.protobuf.ByteString.copyFromUtf8((String) ref);
493 rpcKind_ = b;
494 return b;
495 } else {
496 return (com.google.protobuf.ByteString) ref;
497 }
498 }
499
500 // repeated uint64 versions = 2;
501 public static final int VERSIONS_FIELD_NUMBER = 2;
502 private java.util.List<java.lang.Long> versions_;
503 public java.util.List<java.lang.Long>
504 getVersionsList() {
505 return versions_;
506 }
507 public int getVersionsCount() {
508 return versions_.size();
509 }
510 public long getVersions(int index) {
511 return versions_.get(index);
512 }
513
514 private void initFields() {
515 rpcKind_ = "";
516 versions_ = java.util.Collections.emptyList();;
517 }
518 private byte memoizedIsInitialized = -1;
519 public final boolean isInitialized() {
520 byte isInitialized = memoizedIsInitialized;
521 if (isInitialized != -1) return isInitialized == 1;
522
523 if (!hasRpcKind()) {
524 memoizedIsInitialized = 0;
525 return false;
526 }
527 memoizedIsInitialized = 1;
528 return true;
529 }
530
531 public void writeTo(com.google.protobuf.CodedOutputStream output)
532 throws java.io.IOException {
533 getSerializedSize();
534 if (((bitField0_ & 0x00000001) == 0x00000001)) {
535 output.writeBytes(1, getRpcKindBytes());
536 }
537 for (int i = 0; i < versions_.size(); i++) {
538 output.writeUInt64(2, versions_.get(i));
539 }
540 getUnknownFields().writeTo(output);
541 }
542
543 private int memoizedSerializedSize = -1;
544 public int getSerializedSize() {
545 int size = memoizedSerializedSize;
546 if (size != -1) return size;
547
548 size = 0;
549 if (((bitField0_ & 0x00000001) == 0x00000001)) {
550 size += com.google.protobuf.CodedOutputStream
551 .computeBytesSize(1, getRpcKindBytes());
552 }
553 {
554 int dataSize = 0;
555 for (int i = 0; i < versions_.size(); i++) {
556 dataSize += com.google.protobuf.CodedOutputStream
557 .computeUInt64SizeNoTag(versions_.get(i));
558 }
559 size += dataSize;
560 size += 1 * getVersionsList().size();
561 }
562 size += getUnknownFields().getSerializedSize();
563 memoizedSerializedSize = size;
564 return size;
565 }
566
567 private static final long serialVersionUID = 0L;
568 @java.lang.Override
569 protected java.lang.Object writeReplace()
570 throws java.io.ObjectStreamException {
571 return super.writeReplace();
572 }
573
574 @java.lang.Override
575 public boolean equals(final java.lang.Object obj) {
576 if (obj == this) {
577 return true;
578 }
579 if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto)) {
580 return super.equals(obj);
581 }
582 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto) obj;
583
584 boolean result = true;
585 result = result && (hasRpcKind() == other.hasRpcKind());
586 if (hasRpcKind()) {
587 result = result && getRpcKind()
588 .equals(other.getRpcKind());
589 }
590 result = result && getVersionsList()
591 .equals(other.getVersionsList());
592 result = result &&
593 getUnknownFields().equals(other.getUnknownFields());
594 return result;
595 }
596
597 @java.lang.Override
598 public int hashCode() {
599 int hash = 41;
600 hash = (19 * hash) + getDescriptorForType().hashCode();
601 if (hasRpcKind()) {
602 hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
603 hash = (53 * hash) + getRpcKind().hashCode();
604 }
605 if (getVersionsCount() > 0) {
606 hash = (37 * hash) + VERSIONS_FIELD_NUMBER;
607 hash = (53 * hash) + getVersionsList().hashCode();
608 }
609 hash = (29 * hash) + getUnknownFields().hashCode();
610 return hash;
611 }
612
613 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
614 com.google.protobuf.ByteString data)
615 throws com.google.protobuf.InvalidProtocolBufferException {
616 return newBuilder().mergeFrom(data).buildParsed();
617 }
618 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
619 com.google.protobuf.ByteString data,
620 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
621 throws com.google.protobuf.InvalidProtocolBufferException {
622 return newBuilder().mergeFrom(data, extensionRegistry)
623 .buildParsed();
624 }
625 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(byte[] data)
626 throws com.google.protobuf.InvalidProtocolBufferException {
627 return newBuilder().mergeFrom(data).buildParsed();
628 }
629 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
630 byte[] data,
631 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
632 throws com.google.protobuf.InvalidProtocolBufferException {
633 return newBuilder().mergeFrom(data, extensionRegistry)
634 .buildParsed();
635 }
636 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(java.io.InputStream input)
637 throws java.io.IOException {
638 return newBuilder().mergeFrom(input).buildParsed();
639 }
640 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
641 java.io.InputStream input,
642 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
643 throws java.io.IOException {
644 return newBuilder().mergeFrom(input, extensionRegistry)
645 .buildParsed();
646 }
647 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseDelimitedFrom(java.io.InputStream input)
648 throws java.io.IOException {
649 Builder builder = newBuilder();
650 if (builder.mergeDelimitedFrom(input)) {
651 return builder.buildParsed();
652 } else {
653 return null;
654 }
655 }
656 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseDelimitedFrom(
657 java.io.InputStream input,
658 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
659 throws java.io.IOException {
660 Builder builder = newBuilder();
661 if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
662 return builder.buildParsed();
663 } else {
664 return null;
665 }
666 }
667 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
668 com.google.protobuf.CodedInputStream input)
669 throws java.io.IOException {
670 return newBuilder().mergeFrom(input).buildParsed();
671 }
672 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
673 com.google.protobuf.CodedInputStream input,
674 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
675 throws java.io.IOException {
676 return newBuilder().mergeFrom(input, extensionRegistry)
677 .buildParsed();
678 }
679
680 public static Builder newBuilder() { return Builder.create(); }
681 public Builder newBuilderForType() { return newBuilder(); }
682 public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto prototype) {
683 return newBuilder().mergeFrom(prototype);
684 }
685 public Builder toBuilder() { return newBuilder(this); }
686
687 @java.lang.Override
688 protected Builder newBuilderForType(
689 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
690 Builder builder = new Builder(parent);
691 return builder;
692 }
693 public static final class Builder extends
694 com.google.protobuf.GeneratedMessage.Builder<Builder>
695 implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder {
696 public static final com.google.protobuf.Descriptors.Descriptor
697 getDescriptor() {
698 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_descriptor;
699 }
700
701 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
702 internalGetFieldAccessorTable() {
703 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable;
704 }
705
706 // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.newBuilder()
707 private Builder() {
708 maybeForceBuilderInitialization();
709 }
710
711 private Builder(BuilderParent parent) {
712 super(parent);
713 maybeForceBuilderInitialization();
714 }
715 private void maybeForceBuilderInitialization() {
716 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
717 }
718 }
719 private static Builder create() {
720 return new Builder();
721 }
722
723 public Builder clear() {
724 super.clear();
725 rpcKind_ = "";
726 bitField0_ = (bitField0_ & ~0x00000001);
727 versions_ = java.util.Collections.emptyList();;
728 bitField0_ = (bitField0_ & ~0x00000002);
729 return this;
730 }
731
732 public Builder clone() {
733 return create().mergeFrom(buildPartial());
734 }
735
736 public com.google.protobuf.Descriptors.Descriptor
737 getDescriptorForType() {
738 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDescriptor();
739 }
740
741 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getDefaultInstanceForType() {
742 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance();
743 }
744
745 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto build() {
746 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto result = buildPartial();
747 if (!result.isInitialized()) {
748 throw newUninitializedMessageException(result);
749 }
750 return result;
751 }
752
753 private org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto buildParsed()
754 throws com.google.protobuf.InvalidProtocolBufferException {
755 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto result = buildPartial();
756 if (!result.isInitialized()) {
757 throw newUninitializedMessageException(
758 result).asInvalidProtocolBufferException();
759 }
760 return result;
761 }
762
763 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto buildPartial() {
764 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto(this);
765 int from_bitField0_ = bitField0_;
766 int to_bitField0_ = 0;
767 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
768 to_bitField0_ |= 0x00000001;
769 }
770 result.rpcKind_ = rpcKind_;
771 if (((bitField0_ & 0x00000002) == 0x00000002)) {
772 versions_ = java.util.Collections.unmodifiableList(versions_);
773 bitField0_ = (bitField0_ & ~0x00000002);
774 }
775 result.versions_ = versions_;
776 result.bitField0_ = to_bitField0_;
777 onBuilt();
778 return result;
779 }
780
781 public Builder mergeFrom(com.google.protobuf.Message other) {
782 if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto) {
783 return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto)other);
784 } else {
785 super.mergeFrom(other);
786 return this;
787 }
788 }
789
790 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto other) {
791 if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance()) return this;
792 if (other.hasRpcKind()) {
793 setRpcKind(other.getRpcKind());
794 }
795 if (!other.versions_.isEmpty()) {
796 if (versions_.isEmpty()) {
797 versions_ = other.versions_;
798 bitField0_ = (bitField0_ & ~0x00000002);
799 } else {
800 ensureVersionsIsMutable();
801 versions_.addAll(other.versions_);
802 }
803 onChanged();
804 }
805 this.mergeUnknownFields(other.getUnknownFields());
806 return this;
807 }
808
809 public final boolean isInitialized() {
810 if (!hasRpcKind()) {
811
812 return false;
813 }
814 return true;
815 }
816
817 public Builder mergeFrom(
818 com.google.protobuf.CodedInputStream input,
819 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
820 throws java.io.IOException {
821 com.google.protobuf.UnknownFieldSet.Builder unknownFields =
822 com.google.protobuf.UnknownFieldSet.newBuilder(
823 this.getUnknownFields());
824 while (true) {
825 int tag = input.readTag();
826 switch (tag) {
827 case 0:
828 this.setUnknownFields(unknownFields.build());
829 onChanged();
830 return this;
831 default: {
832 if (!parseUnknownField(input, unknownFields,
833 extensionRegistry, tag)) {
834 this.setUnknownFields(unknownFields.build());
835 onChanged();
836 return this;
837 }
838 break;
839 }
840 case 10: {
841 bitField0_ |= 0x00000001;
842 rpcKind_ = input.readBytes();
843 break;
844 }
845 case 16: {
846 ensureVersionsIsMutable();
847 versions_.add(input.readUInt64());
848 break;
849 }
850 case 18: {
851 int length = input.readRawVarint32();
852 int limit = input.pushLimit(length);
853 while (input.getBytesUntilLimit() > 0) {
854 addVersions(input.readUInt64());
855 }
856 input.popLimit(limit);
857 break;
858 }
859 }
860 }
861 }
862
863 private int bitField0_;
864
865 // required string rpcKind = 1;
866 private java.lang.Object rpcKind_ = "";
867 public boolean hasRpcKind() {
868 return ((bitField0_ & 0x00000001) == 0x00000001);
869 }
870 public String getRpcKind() {
871 java.lang.Object ref = rpcKind_;
872 if (!(ref instanceof String)) {
873 String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
874 rpcKind_ = s;
875 return s;
876 } else {
877 return (String) ref;
878 }
879 }
880 public Builder setRpcKind(String value) {
881 if (value == null) {
882 throw new NullPointerException();
883 }
884 bitField0_ |= 0x00000001;
885 rpcKind_ = value;
886 onChanged();
887 return this;
888 }
889 public Builder clearRpcKind() {
890 bitField0_ = (bitField0_ & ~0x00000001);
891 rpcKind_ = getDefaultInstance().getRpcKind();
892 onChanged();
893 return this;
894 }
895 void setRpcKind(com.google.protobuf.ByteString value) {
896 bitField0_ |= 0x00000001;
897 rpcKind_ = value;
898 onChanged();
899 }
900
901 // repeated uint64 versions = 2;
902 private java.util.List<java.lang.Long> versions_ = java.util.Collections.emptyList();;
903 private void ensureVersionsIsMutable() {
904 if (!((bitField0_ & 0x00000002) == 0x00000002)) {
905 versions_ = new java.util.ArrayList<java.lang.Long>(versions_);
906 bitField0_ |= 0x00000002;
907 }
908 }
909 public java.util.List<java.lang.Long>
910 getVersionsList() {
911 return java.util.Collections.unmodifiableList(versions_);
912 }
913 public int getVersionsCount() {
914 return versions_.size();
915 }
916 public long getVersions(int index) {
917 return versions_.get(index);
918 }
919 public Builder setVersions(
920 int index, long value) {
921 ensureVersionsIsMutable();
922 versions_.set(index, value);
923 onChanged();
924 return this;
925 }
926 public Builder addVersions(long value) {
927 ensureVersionsIsMutable();
928 versions_.add(value);
929 onChanged();
930 return this;
931 }
932 public Builder addAllVersions(
933 java.lang.Iterable<? extends java.lang.Long> values) {
934 ensureVersionsIsMutable();
935 super.addAll(values, versions_);
936 onChanged();
937 return this;
938 }
939 public Builder clearVersions() {
940 versions_ = java.util.Collections.emptyList();;
941 bitField0_ = (bitField0_ & ~0x00000002);
942 onChanged();
943 return this;
944 }
945
946 // @@protoc_insertion_point(builder_scope:hadoop.common.ProtocolVersionProto)
947 }
948
    // Eagerly builds the singleton default instance (uninitialized-fields
    // constructor, then initFields() to install field defaults).
    static {
      defaultInstance = new ProtocolVersionProto(true);
      defaultInstance.initFields();
    }
953
954 // @@protoc_insertion_point(class_scope:hadoop.common.ProtocolVersionProto)
955 }
956
  /**
   * Read-side accessor contract shared by {@code GetProtocolVersionsResponseProto}
   * and its Builder for the single repeated message field
   * {@code protocolVersions} (field number 1).
   */
  public interface GetProtocolVersionsResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;
    // Snapshot of all elements as built messages.
    java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto>
        getProtocolVersionsList();
    // Element access by position.
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getProtocolVersions(int index);
    int getProtocolVersionsCount();
    // OrBuilder views: on a Builder these may expose nested builders without
    // forcing a build; on a built message they are the messages themselves.
    java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>
        getProtocolVersionsOrBuilderList();
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder getProtocolVersionsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.common.GetProtocolVersionsResponseProto}.
   *
   * <p>Generated message carrying one repeated field,
   * {@code protocolVersions} (field number 1). Built instances are immutable;
   * use {@link #newBuilder()} to construct or modify.
   */
  public static final class GetProtocolVersionsResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetProtocolVersionsResponseProtoOrBuilder {
    // Use GetProtocolVersionsResponseProto.newBuilder() to construct.
    private GetProtocolVersionsResponseProto(Builder builder) {
      super(builder);
    }
    // "noInit" constructor used only for the singleton default instance;
    // fields are installed afterwards by initFields() in the static block.
    private GetProtocolVersionsResponseProto(boolean noInit) {}

    // Singleton default instance, created in the static initializer at the
    // bottom of this class.
    private static final GetProtocolVersionsResponseProto defaultInstance;
    public static GetProtocolVersionsResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetProtocolVersionsResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor;
    }

    // Reflection table used by GeneratedMessage to map descriptor fields to
    // the generated accessors; populated in the outer class.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable;
    }

    // repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;
    public static final int PROTOCOLVERSIONS_FIELD_NUMBER = 1;
    // Immutable after construction; either Collections.emptyList() or the
    // unmodifiable list handed over by Builder.buildPartial().
    private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> protocolVersions_;
    public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> getProtocolVersionsList() {
      return protocolVersions_;
    }
    public java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>
        getProtocolVersionsOrBuilderList() {
      return protocolVersions_;
    }
    public int getProtocolVersionsCount() {
      return protocolVersions_.size();
    }
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getProtocolVersions(int index) {
      return protocolVersions_.get(index);
    }
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder getProtocolVersionsOrBuilder(
        int index) {
      return protocolVersions_.get(index);
    }

    // Installs field defaults on the default instance.
    private void initFields() {
      protocolVersions_ = java.util.Collections.emptyList();
    }
    // Cached tri-state: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A response is initialized iff every nested ProtocolVersionProto is
    // (the repeated field itself has no presence requirement).
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getProtocolVersionsCount(); i++) {
        if (!getProtocolVersions(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      // Result intentionally ignored: primes the memoized sizes used by
      // writeMessage for length-delimited encoding.
      getSerializedSize();
      for (int i = 0; i < protocolVersions_.size(); i++) {
        output.writeMessage(1, protocolVersions_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < protocolVersions_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, protocolVersions_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over the repeated field plus unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) obj;

      boolean result = true;
      result = result && getProtocolVersionsList()
          .equals(other.getProtocolVersionsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash mixes the descriptor, the field number tag, the list hash, and
    // unknown fields — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getProtocolVersionsCount() > 0) {
        hash = (37 * hash) + PROTOCOLVERSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getProtocolVersionsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    // ---- Static parse entry points. All delegate to a fresh Builder and
    // buildParsed(), which converts missing-required-field errors into
    // InvalidProtocolBufferException. ----
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants read a varint length prefix first and return null
    // on a clean end-of-stream.
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code hadoop.common.GetProtocolVersionsResponseProto}.
     *
     * <p>The repeated field is managed in one of two modes: a plain list
     * ({@code protocolVersions_}) with a copy-on-write ownership bit, or a
     * {@code RepeatedFieldBuilder} ({@code protocolVersionsBuilder_}) once
     * nested-builder access has been requested. Exactly one of the two is
     * active at a time; every accessor branches on which one it is.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // When the runtime forces field builders (alwaysUseFieldBuilders),
      // eagerly switch the repeated field into builder mode.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getProtocolVersionsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (protocolVersionsBuilder_ == null) {
          protocolVersions_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          protocolVersionsBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance();
      }

      // Like buildPartial() but throws UninitializedMessageException if any
      // nested message is missing required fields.
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto build() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Parse-path variant of build(): surfaces missing required fields as
      // InvalidProtocolBufferException instead.
      private org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto(this);
        int from_bitField0_ = bitField0_;
        if (protocolVersionsBuilder_ == null) {
          // Hand ownership of the list to the message: freeze it and drop
          // this builder's ownership bit so a later mutation re-copies.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            protocolVersions_ = java.util.Collections.unmodifiableList(protocolVersions_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.protocolVersions_ = protocolVersions_;
        } else {
          result.protocolVersions_ = protocolVersionsBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance()) return this;
        if (protocolVersionsBuilder_ == null) {
          if (!other.protocolVersions_.isEmpty()) {
            if (protocolVersions_.isEmpty()) {
              // Share other's (immutable) list; ownership bit stays clear.
              protocolVersions_ = other.protocolVersions_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureProtocolVersionsIsMutable();
              protocolVersions_.addAll(other.protocolVersions_);
            }
            onChanged();
          }
        } else {
          if (!other.protocolVersions_.isEmpty()) {
            if (protocolVersionsBuilder_.isEmpty()) {
              // Cheaper to drop the empty field builder and adopt other's
              // list directly, re-creating the builder only if forced.
              protocolVersionsBuilder_.dispose();
              protocolVersionsBuilder_ = null;
              protocolVersions_ = other.protocolVersions_;
              bitField0_ = (bitField0_ & ~0x00000001);
              protocolVersionsBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getProtocolVersionsFieldBuilder() : null;
            } else {
              protocolVersionsBuilder_.addAllMessages(other.protocolVersions_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        for (int i = 0; i < getProtocolVersionsCount(); i++) {
          if (!getProtocolVersions(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }

      // Wire-format parse loop. NOTE: the generator emits the 'default' arm
      // before 'case 10'; Java switch matching is unaffected by case order.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream/message.
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1, wire type 2: one protocolVersions element.
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder subBuilder = org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addProtocolVersions(subBuilder.buildPartial());
              break;
            }
          }
        }
      }

      // Bit 0: this builder privately owns a mutable protocolVersions_ list.
      private int bitField0_;

      // repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;
      private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> protocolVersions_ =
        java.util.Collections.emptyList();
      // Copy-on-write: clone the (possibly shared) list before first mutation.
      private void ensureProtocolVersionsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          protocolVersions_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto>(protocolVersions_);
          bitField0_ |= 0x00000001;
        }
      }

      // Non-null once builder mode is active; then protocolVersions_ is null.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder> protocolVersionsBuilder_;

      public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> getProtocolVersionsList() {
        if (protocolVersionsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(protocolVersions_);
        } else {
          return protocolVersionsBuilder_.getMessageList();
        }
      }
      public int getProtocolVersionsCount() {
        if (protocolVersionsBuilder_ == null) {
          return protocolVersions_.size();
        } else {
          return protocolVersionsBuilder_.getCount();
        }
      }
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getProtocolVersions(int index) {
        if (protocolVersionsBuilder_ == null) {
          return protocolVersions_.get(index);
        } else {
          return protocolVersionsBuilder_.getMessage(index);
        }
      }
      public Builder setProtocolVersions(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto value) {
        if (protocolVersionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureProtocolVersionsIsMutable();
          protocolVersions_.set(index, value);
          onChanged();
        } else {
          protocolVersionsBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setProtocolVersions(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder builderForValue) {
        if (protocolVersionsBuilder_ == null) {
          ensureProtocolVersionsIsMutable();
          protocolVersions_.set(index, builderForValue.build());
          onChanged();
        } else {
          protocolVersionsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addProtocolVersions(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto value) {
        if (protocolVersionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureProtocolVersionsIsMutable();
          protocolVersions_.add(value);
          onChanged();
        } else {
          protocolVersionsBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addProtocolVersions(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto value) {
        if (protocolVersionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureProtocolVersionsIsMutable();
          protocolVersions_.add(index, value);
          onChanged();
        } else {
          protocolVersionsBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addProtocolVersions(
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder builderForValue) {
        if (protocolVersionsBuilder_ == null) {
          ensureProtocolVersionsIsMutable();
          protocolVersions_.add(builderForValue.build());
          onChanged();
        } else {
          protocolVersionsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addProtocolVersions(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder builderForValue) {
        if (protocolVersionsBuilder_ == null) {
          ensureProtocolVersionsIsMutable();
          protocolVersions_.add(index, builderForValue.build());
          onChanged();
        } else {
          protocolVersionsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllProtocolVersions(
          java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> values) {
        if (protocolVersionsBuilder_ == null) {
          ensureProtocolVersionsIsMutable();
          super.addAll(values, protocolVersions_);
          onChanged();
        } else {
          protocolVersionsBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearProtocolVersions() {
        if (protocolVersionsBuilder_ == null) {
          protocolVersions_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          protocolVersionsBuilder_.clear();
        }
        return this;
      }
      public Builder removeProtocolVersions(int index) {
        if (protocolVersionsBuilder_ == null) {
          ensureProtocolVersionsIsMutable();
          protocolVersions_.remove(index);
          onChanged();
        } else {
          protocolVersionsBuilder_.remove(index);
        }
        return this;
      }
      // Nested-builder accessors below force builder mode via
      // getProtocolVersionsFieldBuilder().
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder getProtocolVersionsBuilder(
          int index) {
        return getProtocolVersionsFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder getProtocolVersionsOrBuilder(
          int index) {
        if (protocolVersionsBuilder_ == null) {
          return protocolVersions_.get(index);
        } else {
          return protocolVersionsBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>
           getProtocolVersionsOrBuilderList() {
        if (protocolVersionsBuilder_ != null) {
          return protocolVersionsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(protocolVersions_);
        }
      }
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder addProtocolVersionsBuilder() {
        return getProtocolVersionsFieldBuilder().addBuilder(
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance());
      }
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder addProtocolVersionsBuilder(
          int index) {
        return getProtocolVersionsFieldBuilder().addBuilder(
            index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder>
           getProtocolVersionsBuilderList() {
        return getProtocolVersionsFieldBuilder().getBuilderList();
      }
      // Lazily switches the field into builder mode: wraps the current list
      // in a RepeatedFieldBuilder and nulls out the plain-list reference.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>
          getProtocolVersionsFieldBuilder() {
        if (protocolVersionsBuilder_ == null) {
          protocolVersionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>(
                  protocolVersions_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          protocolVersions_ = null;
        }
        return protocolVersionsBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolVersionsResponseProto)
    }

    // Eagerly builds the singleton default instance.
    static {
      defaultInstance = new GetProtocolVersionsResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolVersionsResponseProto)
  }
1549
  /**
   * Read-side accessor contract shared by {@code GetProtocolSignatureRequestProto}
   * and its Builder: two required string fields, {@code protocol} (1) and
   * {@code rpcKind} (2).
   */
  public interface GetProtocolSignatureRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string protocol = 1;
    boolean hasProtocol();
    String getProtocol();

    // required string rpcKind = 2;
    boolean hasRpcKind();
    String getRpcKind();
  }
1561 public static final class GetProtocolSignatureRequestProto extends
1562 com.google.protobuf.GeneratedMessage
1563 implements GetProtocolSignatureRequestProtoOrBuilder {
    // Use GetProtocolSignatureRequestProto.newBuilder() to construct.
    private GetProtocolSignatureRequestProto(Builder builder) {
      super(builder);
    }
    // "noInit" constructor for the singleton default instance only; fields
    // are installed afterwards via initFields().
    private GetProtocolSignatureRequestProto(boolean noInit) {}
1569
    // Singleton default instance, created in the class's static initializer
    // (outside this view, per the standard generated-code layout).
    private static final GetProtocolSignatureRequestProto defaultInstance;
    public static GetProtocolSignatureRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetProtocolSignatureRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor;
    }

    // Reflection table mapping descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable;
    }
1588
    // Presence bits: bit 0 = protocol, bit 1 = rpcKind.
    private int bitField0_;
    // required string protocol = 1;
    public static final int PROTOCOL_FIELD_NUMBER = 1;
    // Either a String or a lazily-decoded ByteString (generated-code idiom).
    private java.lang.Object protocol_;
    public boolean hasProtocol() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    // Decodes and caches the String form; the decoded value is cached only
    // when the bytes are valid UTF-8.
    public String getProtocol() {
      java.lang.Object ref = protocol_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          protocol_ = s;
        }
        return s;
      }
    }
    // Serialization helper: returns (and caches) the UTF-8 bytes.
    private com.google.protobuf.ByteString getProtocolBytes() {
      java.lang.Object ref = protocol_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        protocol_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // required string rpcKind = 2;
    public static final int RPCKIND_FIELD_NUMBER = 2;
    private java.lang.Object rpcKind_;
    public boolean hasRpcKind() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    // Same lazy String/ByteString scheme as getProtocol() above.
    public String getRpcKind() {
      java.lang.Object ref = rpcKind_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          rpcKind_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getRpcKindBytes() {
      java.lang.Object ref = rpcKind_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        rpcKind_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
1653
    // Installs field defaults on the default instance.
    private void initFields() {
      protocol_ = "";
      rpcKind_ = "";
    }
    // Cached tri-state: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // Both fields are required, so both presence bits must be set.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasProtocol()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRpcKind()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      // Result ignored: primes the memoized size before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getProtocolBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getRpcKindBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getProtocolBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getRpcKindBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1705
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over both fields' presence and content, plus unknown
    // fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto) obj;

      boolean result = true;
      result = result && (hasProtocol() == other.hasProtocol());
      if (hasProtocol()) {
        result = result && getProtocol()
            .equals(other.getProtocol());
      }
      result = result && (hasRpcKind() == other.hasRpcKind());
      if (hasRpcKind()) {
        result = result && getRpcKind()
            .equals(other.getRpcKind());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash mixes descriptor, each present field's number and value, and
    // unknown fields — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProtocol()) {
        hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
        hash = (53 * hash) + getProtocol().hashCode();
      }
      if (hasRpcKind()) {
        hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
        hash = (53 * hash) + getRpcKind().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
1754
    // ---- Static parse entry points. All delegate to a fresh Builder and
    // buildParsed(), which reports missing required fields as
    // InvalidProtocolBufferException. ----
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variant: reads a varint length prefix first and returns null
    // on a clean end-of-stream.
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
1798 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseDelimitedFrom(
1799 java.io.InputStream input,
1800 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1801 throws java.io.IOException {
1802 Builder builder = newBuilder();
1803 if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
1804 return builder.buildParsed();
1805 } else {
1806 return null;
1807 }
1808 }
1809 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(
1810 com.google.protobuf.CodedInputStream input)
1811 throws java.io.IOException {
1812 return newBuilder().mergeFrom(input).buildParsed();
1813 }
1814 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(
1815 com.google.protobuf.CodedInputStream input,
1816 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1817 throws java.io.IOException {
1818 return newBuilder().mergeFrom(input, extensionRegistry)
1819 .buildParsed();
1820 }
1821
    // Builder factories: newBuilder() starts empty; newBuilder(prototype) starts
    // pre-populated by merging the given message; toBuilder() clones this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent (used for nested builders).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Mutable builder for GetProtocolSignatureRequestProto. Tracks field presence
    // with bitField0_ (bit 0 = protocol, bit 1 = rpcKind); both fields are
    // required, so isInitialized() demands both bits before build() succeeds.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so there are no nested field builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets both fields to their defaults and clears their has-bits.
      public Builder clear() {
        super.clear();
        protocol_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcKind_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance();
      }

      // Builds the message; throws UninitializedMessageException if a required
      // field is missing.
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto build() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Same as build(), but reports missing required fields as a checked
      // InvalidProtocolBufferException — used by the parseFrom() entry points.
      private org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      // Copies the builder state (fields + has-bits) into a new message without
      // checking required fields.
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.protocol_ = protocol_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.rpcKind_ = rpcKind_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: fields set on `other` overwrite this builder's values;
      // merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance()) return this;
        if (other.hasProtocol()) {
          setProtocol(other.getProtocol());
        }
        if (other.hasRpcKind()) {
          setRpcKind(other.getRpcKind());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Both fields are declared `required` in the .proto, so both must be set.
      public final boolean isInitialized() {
        if (!hasProtocol()) {
          
          return false;
        }
        if (!hasRpcKind()) {
          
          return false;
        }
        return true;
      }

      // Wire-format parser: tag 10 = field 1 (protocol, length-delimited),
      // tag 18 = field 2 (rpcKind); tag 0 means end of stream. Unrecognized tags
      // are preserved in the unknown-field set.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              protocol_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              rpcKind_ = input.readBytes();
              break;
            }
          }
        }
      }

      // Has-bits for the two fields (bit 0 = protocol, bit 1 = rpcKind).
      private int bitField0_;

      // required string protocol = 1;
      // Stored as either a String or a ByteString; getProtocol() lazily decodes
      // and caches the String form.
      private java.lang.Object protocol_ = "";
      public boolean hasProtocol() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getProtocol() {
        java.lang.Object ref = protocol_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          protocol_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setProtocol(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        protocol_ = value;
        onChanged();
        return this;
      }
      // Restores the field to its proto default ("") and clears its has-bit.
      public Builder clearProtocol() {
        bitField0_ = (bitField0_ & ~0x00000001);
        protocol_ = getDefaultInstance().getProtocol();
        onChanged();
        return this;
      }
      // Package-private fast path used by the parser: stores raw bytes undecoded.
      void setProtocol(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        protocol_ = value;
        onChanged();
      }

      // required string rpcKind = 2;
      // Same lazy String/ByteString representation as protocol_.
      private java.lang.Object rpcKind_ = "";
      public boolean hasRpcKind() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public String getRpcKind() {
        java.lang.Object ref = rpcKind_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          rpcKind_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setRpcKind(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        rpcKind_ = value;
        onChanged();
        return this;
      }
      public Builder clearRpcKind() {
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcKind_ = getDefaultInstance().getRpcKind();
        onChanged();
        return this;
      }
      // Package-private fast path used by the parser: stores raw bytes undecoded.
      void setRpcKind(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        rpcKind_ = value;
        onChanged();
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolSignatureRequestProto)
    }
2068
    // Creates the shared default instance via the no-init constructor (skipping
    // builder machinery) and then applies field defaults.
    static {
      defaultInstance = new GetProtocolSignatureRequestProto(true);
      defaultInstance.initFields();
    }
2073
2074 // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolSignatureRequestProto)
2075 }
2076
  // Read-only view shared by GetProtocolSignatureResponseProto and its Builder;
  // exposes the repeated protocolSignature field (list, indexed, count, and
  // OrBuilder variants).
  public interface GetProtocolSignatureResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;
    java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> 
        getProtocolSignatureList();
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getProtocolSignature(int index);
    int getProtocolSignatureCount();
    java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 
        getProtocolSignatureOrBuilderList();
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder getProtocolSignatureOrBuilder(
        int index);
  }
2090 public static final class GetProtocolSignatureResponseProto extends
2091 com.google.protobuf.GeneratedMessage
2092 implements GetProtocolSignatureResponseProtoOrBuilder {
    // Use GetProtocolSignatureResponseProto.newBuilder() to construct.
    private GetProtocolSignatureResponseProto(Builder builder) {
      super(builder);
    }
    // Used only by the static initializer to create the default instance without
    // running field initialization.
    private GetProtocolSignatureResponseProto(boolean noInit) {}
    
    // Singleton default (empty) instance, assigned in the class's static block.
    private static final GetProtocolSignatureResponseProto defaultInstance;
    public static GetProtocolSignatureResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public GetProtocolSignatureResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    // Descriptor / reflection plumbing backed by the outer class's static tables.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable;
    }
2117
    // repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;
    // Immutable snapshot list installed by the builder (or Collections.emptyList()
    // via initFields()); accessors delegate straight to it.
    public static final int PROTOCOLSIGNATURE_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> protocolSignature_;
    public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> getProtocolSignatureList() {
      return protocolSignature_;
    }
    public java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 
        getProtocolSignatureOrBuilderList() {
      return protocolSignature_;
    }
    public int getProtocolSignatureCount() {
      return protocolSignature_.size();
    }
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getProtocolSignature(int index) {
      return protocolSignature_.get(index);
    }
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder getProtocolSignatureOrBuilder(
        int index) {
      return protocolSignature_.get(index);
    }
2138
    private void initFields() {
      protocolSignature_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      // The repeated field itself is optional, but each element must satisfy its
      // own required fields.
      for (int i = 0; i < getProtocolSignatureCount(); i++) {
        if (!getProtocolSignature(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
2156
    // Serializes each protocolSignature element as field 1, then any unknown
    // fields. getSerializedSize() is called first to populate memoized sizes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < protocolSignature_.size(); i++) {
        output.writeMessage(1, protocolSignature_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
2165
    // Wire size in bytes, memoized after first computation (-1 = not yet computed).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      for (int i = 0; i < protocolSignature_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, protocolSignature_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2180
    private static final long serialVersionUID = 0L;
    // Java-serialization hook: delegate to GeneratedMessage.writeReplace() so the
    // message is serialized via its protobuf wire form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2187
    // Value equality: the repeated protocolSignature list and the unknown-field
    // set must both match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) obj;
      
      boolean result = true;
      result = result && getProtocolSignatureList()
          .equals(other.getProtocolSignatureList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    // Hash consistent with equals(): mixes the descriptor, the non-empty repeated
    // field (tag number then list hash), and the unknown-field set.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getProtocolSignatureCount() > 0) {
        hash = (37 * hash) + PROTOCOLSIGNATURE_FIELD_NUMBER;
        hash = (53 * hash) + getProtocolSignatureList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
2217
    // Static parsers for every supported input source. Each delegates to a fresh
    // Builder and finishes with buildParsed(), which reports uninitialized nested
    // ProtocolSignatureProto elements as an InvalidProtocolBufferException.
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants read a length-prefixed message; they return null when the
    // stream is already at EOF (mergeDelimitedFrom reports false).
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
2284
    // Builder factories: newBuilder() starts empty; newBuilder(prototype) starts
    // pre-populated by merging the given message; toBuilder() clones this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    // Framework hook: creates a builder attached to a parent (used for nested builders).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2298 public static final class Builder extends
2299 com.google.protobuf.GeneratedMessage.Builder<Builder>
2300 implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProtoOrBuilder {
      // Descriptor / reflection plumbing backed by the outer class's static tables.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder when the runtime requires
      // field builders (i.e. when the builder has a parent to notify).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getProtocolSignatureFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
2328
      // Resets the repeated field. When no field builder exists the plain list is
      // replaced and its "mutable" bit cleared; otherwise the builder is cleared.
      public Builder clear() {
        super.clear();
        if (protocolSignatureBuilder_ == null) {
          protocolSignature_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          protocolSignatureBuilder_.clear();
        }
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance();
      }
2352
      // Builds the message; throws UninitializedMessageException if any nested
      // ProtocolSignatureProto element is missing required fields.
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto build() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      // Same as build(), but reports uninitialized elements as a checked
      // InvalidProtocolBufferException — used by the parseFrom() entry points.
      private org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      // Copies the repeated field into a new message. With no field builder the
      // backing list is frozen (wrapped unmodifiable) and handed over; afterwards
      // the mutable bit is cleared so later edits re-copy before mutating.
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto(this);
        int from_bitField0_ = bitField0_;
        if (protocolSignatureBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            protocolSignature_ = java.util.Collections.unmodifiableList(protocolSignature_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.protocolSignature_ = protocolSignature_;
        } else {
          result.protocolSignature_ = protocolSignatureBuilder_.build();
        }
        onBuilt();
        return result;
      }
2386
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      // Typed merge: appends other's protocolSignature elements. When this side is
      // empty, other's (immutable) list is adopted directly instead of copied; if
      // a field builder is active the merge goes through it instead.
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance()) return this;
        if (protocolSignatureBuilder_ == null) {
          if (!other.protocolSignature_.isEmpty()) {
            if (protocolSignature_.isEmpty()) {
              protocolSignature_ = other.protocolSignature_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureProtocolSignatureIsMutable();
              protocolSignature_.addAll(other.protocolSignature_);
            }
            onChanged();
          }
        } else {
          if (!other.protocolSignature_.isEmpty()) {
            if (protocolSignatureBuilder_.isEmpty()) {
              // Empty builder: discard it, adopt other's list, and recreate the
              // builder only if the runtime forces field builders.
              protocolSignatureBuilder_.dispose();
              protocolSignatureBuilder_ = null;
              protocolSignature_ = other.protocolSignature_;
              bitField0_ = (bitField0_ & ~0x00000001);
              protocolSignatureBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getProtocolSignatureFieldBuilder() : null;
            } else {
              protocolSignatureBuilder_.addAllMessages(other.protocolSignature_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
2427
      // Initialized iff every nested ProtocolSignatureProto element satisfies its
      // own required fields (the repeated field itself may be empty).
      public final boolean isInitialized() {
        for (int i = 0; i < getProtocolSignatureCount(); i++) {
          if (!getProtocolSignature(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
2437
      // Wire-format parser: tag 10 = field 1 (a length-delimited
      // ProtocolSignatureProto, parsed via a sub-builder and appended); tag 0 means
      // end of stream. Unrecognized tags are preserved in the unknown-field set.
      // (The `default` label preceding `case 10` is harmless generated ordering —
      // each arm returns or breaks, so no fall-through occurs.)
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder subBuilder = org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addProtocolSignature(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
2470
      // Bit 0 marks whether protocolSignature_ is a private mutable copy (set by
      // ensureProtocolSignatureIsMutable, cleared when the list is handed off).
      private int bitField0_;
      
      // repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;
      // Plain-list representation; used only while protocolSignatureBuilder_ is null.
      private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> protocolSignature_ =
        java.util.Collections.emptyList();
      // Copy-before-write: replaces a shared/immutable list with a private
      // ArrayList copy the first time a mutation is requested.
      private void ensureProtocolSignatureIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          protocolSignature_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto>(protocolSignature_);
          bitField0_ |= 0x00000001;
         }
      }
      
      // Lazily-created nested-builder representation; once non-null, all repeated
      // field operations route through it instead of protocolSignature_.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> protocolSignatureBuilder_;
2485
      // Read accessors: route through the field builder when present, otherwise
      // expose an unmodifiable view of the plain list.
      public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> getProtocolSignatureList() {
        if (protocolSignatureBuilder_ == null) {
          return java.util.Collections.unmodifiableList(protocolSignature_);
        } else {
          return protocolSignatureBuilder_.getMessageList();
        }
      }
      public int getProtocolSignatureCount() {
        if (protocolSignatureBuilder_ == null) {
          return protocolSignature_.size();
        } else {
          return protocolSignatureBuilder_.getCount();
        }
      }
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getProtocolSignature(int index) {
        if (protocolSignatureBuilder_ == null) {
          return protocolSignature_.get(index);
        } else {
          return protocolSignatureBuilder_.getMessage(index);
        }
      }
      // Replaces the element at `index`; rejects null messages.
      public Builder setProtocolSignature(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto value) {
        if (protocolSignatureBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureProtocolSignatureIsMutable();
          protocolSignature_.set(index, value);
          onChanged();
        } else {
          protocolSignatureBuilder_.setMessage(index, value);
        }
        return this;
      }
      // Replaces the element at `index` with the built form of the given builder.
      public Builder setProtocolSignature(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder builderForValue) {
        if (protocolSignatureBuilder_ == null) {
          ensureProtocolSignatureIsMutable();
          protocolSignature_.set(index, builderForValue.build());
          onChanged();
        } else {
          protocolSignatureBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      // Append/insert overloads for the repeated field; message-valued variants
      // reject null, builder-valued variants build first. All route through the
      // field builder when one is active.
      public Builder addProtocolSignature(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto value) {
        if (protocolSignatureBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureProtocolSignatureIsMutable();
          protocolSignature_.add(value);
          onChanged();
        } else {
          protocolSignatureBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addProtocolSignature(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto value) {
        if (protocolSignatureBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureProtocolSignatureIsMutable();
          protocolSignature_.add(index, value);
          onChanged();
        } else {
          protocolSignatureBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addProtocolSignature(
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder builderForValue) {
        if (protocolSignatureBuilder_ == null) {
          ensureProtocolSignatureIsMutable();
          protocolSignature_.add(builderForValue.build());
          onChanged();
        } else {
          protocolSignatureBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addProtocolSignature(
          int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder builderForValue) {
        if (protocolSignatureBuilder_ == null) {
          ensureProtocolSignatureIsMutable();
          protocolSignature_.add(index, builderForValue.build());
          onChanged();
        } else {
          protocolSignatureBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
2581 public Builder addAllProtocolSignature(
2582 java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> values) {
2583 if (protocolSignatureBuilder_ == null) {
2584 ensureProtocolSignatureIsMutable();
2585 super.addAll(values, protocolSignature_);
2586 onChanged();
2587 } else {
2588 protocolSignatureBuilder_.addAllMessages(values);
2589 }
2590 return this;
2591 }
2592 public Builder clearProtocolSignature() {
2593 if (protocolSignatureBuilder_ == null) {
2594 protocolSignature_ = java.util.Collections.emptyList();
2595 bitField0_ = (bitField0_ & ~0x00000001);
2596 onChanged();
2597 } else {
2598 protocolSignatureBuilder_.clear();
2599 }
2600 return this;
2601 }
2602 public Builder removeProtocolSignature(int index) {
2603 if (protocolSignatureBuilder_ == null) {
2604 ensureProtocolSignatureIsMutable();
2605 protocolSignature_.remove(index);
2606 onChanged();
2607 } else {
2608 protocolSignatureBuilder_.remove(index);
2609 }
2610 return this;
2611 }
      /**
       * Returns a mutable sub-builder for the element at {@code index}.
       * Forces creation of the lazy RepeatedFieldBuilder as a side effect.
       */
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder getProtocolSignatureBuilder(
          int index) {
        return getProtocolSignatureFieldBuilder().getBuilder(index);
      }
2616 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder getProtocolSignatureOrBuilder(
2617 int index) {
2618 if (protocolSignatureBuilder_ == null) {
2619 return protocolSignature_.get(index); } else {
2620 return protocolSignatureBuilder_.getMessageOrBuilder(index);
2621 }
2622 }
2623 public java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder>
2624 getProtocolSignatureOrBuilderList() {
2625 if (protocolSignatureBuilder_ != null) {
2626 return protocolSignatureBuilder_.getMessageOrBuilderList();
2627 } else {
2628 return java.util.Collections.unmodifiableList(protocolSignature_);
2629 }
2630 }
      /**
       * Appends a new, default-initialized element and returns its builder
       * for in-place population.
       */
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder addProtocolSignatureBuilder() {
        return getProtocolSignatureFieldBuilder().addBuilder(
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance());
      }
      /**
       * Inserts a new, default-initialized element at {@code index} and
       * returns its builder for in-place population.
       */
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder addProtocolSignatureBuilder(
          int index) {
        return getProtocolSignatureFieldBuilder().addBuilder(
            index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance());
      }
      /**
       * Returns builders for every element of the repeated field. Forces
       * creation of the lazy RepeatedFieldBuilder as a side effect.
       */
      public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder>
           getProtocolSignatureBuilderList() {
        return getProtocolSignatureFieldBuilder().getBuilderList();
      }
      /**
       * Lazily creates the RepeatedFieldBuilder that manages the repeated
       * protocolSignature field. The current list contents and has-bit are
       * handed to the builder; afterwards the plain list reference is nulled
       * out so all subsequent access must go through the builder.
       */
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 
          getProtocolSignatureFieldBuilder() {
        if (protocolSignatureBuilder_ == null) {
          protocolSignatureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder>(
                  protocolSignature_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          // From now on the builder owns the elements.
          protocolSignature_ = null;
        }
        return protocolSignatureBuilder_;
      }
2658
2659 // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolSignatureResponseProto)
2660 }
2661
  // Eagerly create the singleton default instance. The (boolean) constructor
  // skips builder-based setup, so initFields() must be called explicitly.
  static {
    defaultInstance = new GetProtocolSignatureResponseProto(true);
    defaultInstance.initFields();
  }
2666
2667 // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolSignatureResponseProto)
2668 }
2669
  /**
   * Read accessor contract shared by {@code ProtocolSignatureProto} and its
   * Builder, covering the fields of message
   * {@code hadoop.common.ProtocolSignatureProto}.
   */
  public interface ProtocolSignatureProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 version = 1;
    /** True when the required version field has been set. */
    boolean hasVersion();
    /** The protocol version (uint64 on the wire). */
    long getVersion();

    // repeated uint32 methods = 2;
    /** All values of the repeated methods field. */
    java.util.List<java.lang.Integer> getMethodsList();
    /** Number of elements in the repeated methods field. */
    int getMethodsCount();
    /** Element of the repeated methods field at {@code index}. */
    int getMethods(int index);
  }
  /**
   * Protocol buffer message {@code hadoop.common.ProtocolSignatureProto}:
   * {@code required uint64 version = 1} plus
   * {@code repeated uint32 methods = 2}. Immutable once built; construct
   * instances via {@link #newBuilder()}. Generated protobuf-2.x style code.
   */
  public static final class ProtocolSignatureProto extends
      com.google.protobuf.GeneratedMessage
      implements ProtocolSignatureProtoOrBuilder {
    // Use ProtocolSignatureProto.newBuilder() to construct.
    private ProtocolSignatureProto(Builder builder) {
      super(builder);
    }
    // Only used to create defaultInstance; fields are set via initFields().
    private ProtocolSignatureProto(boolean noInit) {}

    private static final ProtocolSignatureProto defaultInstance;
    public static ProtocolSignatureProto getDefaultInstance() {
      return defaultInstance;
    }

    public ProtocolSignatureProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable;
    }

    // Bit 0x1 records presence of the required version field.
    private int bitField0_;
    // required uint64 version = 1;
    public static final int VERSION_FIELD_NUMBER = 1;
    private long version_;
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getVersion() {
      return version_;
    }

    // repeated uint32 methods = 2;
    public static final int METHODS_FIELD_NUMBER = 2;
    private java.util.List<java.lang.Integer> methods_;
    public java.util.List<java.lang.Integer>
        getMethodsList() {
      return methods_;
    }
    public int getMethodsCount() {
      return methods_.size();
    }
    public int getMethods(int index) {
      return methods_.get(index);
    }

    private void initFields() {
      version_ = 0L;
      methods_ = java.util.Collections.emptyList();;
    }
    // Caches isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Only the required version field gates initialization.
      if (!hasVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect: populates memoizedSerializedSize.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, version_);
      }
      // Field 2 is written unpacked: one tag per element.
      for (int i = 0; i < methods_.size(); i++) {
        output.writeUInt32(2, methods_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Caches getSerializedSize(); -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, version_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < methods_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeUInt32SizeNoTag(methods_.get(i));
        }
        size += dataSize;
        // One byte of tag overhead per unpacked element of field 2.
        size += 1 * getMethodsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto) obj;

      // Field-by-field comparison: presence bits, values, unknown fields.
      boolean result = true;
      result = result && (hasVersion() == other.hasVersion());
      if (hasVersion()) {
        result = result && (getVersion()
            == other.getVersion());
      }
      result = result && getMethodsList()
          .equals(other.getMethodsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      // Mixes descriptor, set fields (keyed by field number), unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getVersion());
      }
      if (getMethodsCount() > 0) {
        hash = (37 * hash) + METHODS_FIELD_NUMBER;
        hash = (53 * hash) + getMethodsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    // --- Static parse helpers: each delegates to a fresh Builder and
    // --- validates required fields via buildParsed().
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Returns null on immediate EOF (no length prefix read).
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for {@code hadoop.common.ProtocolSignatureProto}. Bit 0x1 of
     * {@code bitField0_} tracks the version field; bit 0x2 tracks whether
     * {@code methods_} is a private mutable list (vs. shared/immutable).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed singular fields, so nothing to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        version_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        methods_ = java.util.Collections.emptyList();;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDescriptor();
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance();
      }

      /** Builds; throws UninitializedMessageException if version is unset. */
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto build() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but surfaces missing required fields as
      // InvalidProtocolBufferException (used by the parseFrom helpers).
      private org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.version_ = version_;
        // Hand the methods list to the message: freeze it and drop this
        // builder's mutability bit so later mutation re-copies the list.
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          methods_ = java.util.Collections.unmodifiableList(methods_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.methods_ = methods_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance()) return this;
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        if (!other.methods_.isEmpty()) {
          if (methods_.isEmpty()) {
            // Share the other message's (immutable) list until first mutation.
            methods_ = other.methods_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureMethodsIsMutable();
            methods_.addAll(other.methods_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasVersion()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          // NOTE: case order (default before the field cases) is a generator
          // artifact; Java switch dispatch is unaffected by case order.
          switch (tag) {
            case 0:
              // End of stream/group.
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              // version = 1, wire type varint.
              bitField0_ |= 0x00000001;
              version_ = input.readUInt64();
              break;
            }
            case 16: {
              // methods = 2, unpacked varint element.
              ensureMethodsIsMutable();
              methods_.add(input.readUInt32());
              break;
            }
            case 18: {
              // methods = 2, packed (length-delimited) encoding.
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              while (input.getBytesUntilLimit() > 0) {
                addMethods(input.readUInt32());
              }
              input.popLimit(limit);
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required uint64 version = 1;
      private long version_ ;
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getVersion() {
        return version_;
      }
      public Builder setVersion(long value) {
        bitField0_ |= 0x00000001;
        version_ = value;
        onChanged();
        return this;
      }
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        version_ = 0L;
        onChanged();
        return this;
      }

      // repeated uint32 methods = 2;
      private java.util.List<java.lang.Integer> methods_ = java.util.Collections.emptyList();;
      // Copy-on-write: replace a shared/immutable list with a private
      // ArrayList before the first mutation (bit 0x2 = "privately owned").
      private void ensureMethodsIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          methods_ = new java.util.ArrayList<java.lang.Integer>(methods_);
          bitField0_ |= 0x00000002;
         }
      }
      public java.util.List<java.lang.Integer>
          getMethodsList() {
        return java.util.Collections.unmodifiableList(methods_);
      }
      public int getMethodsCount() {
        return methods_.size();
      }
      public int getMethods(int index) {
        return methods_.get(index);
      }
      public Builder setMethods(
          int index, int value) {
        ensureMethodsIsMutable();
        methods_.set(index, value);
        onChanged();
        return this;
      }
      public Builder addMethods(int value) {
        ensureMethodsIsMutable();
        methods_.add(value);
        onChanged();
        return this;
      }
      public Builder addAllMethods(
          java.lang.Iterable<? extends java.lang.Integer> values) {
        ensureMethodsIsMutable();
        super.addAll(values, methods_);
        onChanged();
        return this;
      }
      public Builder clearMethods() {
        methods_ = java.util.Collections.emptyList();;
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.ProtocolSignatureProto)
    }

    // Eagerly create the singleton default instance; the (boolean)
    // constructor skips builder setup, so initFields() is called explicitly.
    static {
      defaultInstance = new ProtocolSignatureProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.ProtocolSignatureProto)
  }
3162
  /**
   * Generated RPC service {@code hadoop.common.ProtocolInfoService} with two
   * methods: index 0 = getProtocolVersions, index 1 = getProtocolSignature.
   * Subclass (or wrap an {@link Interface} via newReflectiveService) on the
   * server side; use {@link Stub}/{@link BlockingStub} on the client side.
   */
  public static abstract class ProtocolInfoService
      implements com.google.protobuf.Service {
    protected ProtocolInfoService() {}

    /** Callback-style server interface mirroring the two service methods. */
    public interface Interface {
      public abstract void getProtocolVersions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done);

      public abstract void getProtocolSignature(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done);

    }

    /** Adapts an {@link Interface} impl into a full protobuf Service. */
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new ProtocolInfoService() {
        @java.lang.Override
        public  void getProtocolVersions(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done) {
          impl.getProtocolVersions(controller, request, done);
        }

        @java.lang.Override
        public  void getProtocolSignature(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done) {
          impl.getProtocolSignature(controller, request, done);
        }

      };
    }

    /**
     * Adapts a {@link BlockingInterface} impl into a BlockingService that
     * dispatches on the method descriptor's index.
     */
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.getProtocolVersions(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)request);
            case 1:
              return impl.getProtocolSignature(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    public abstract void getProtocolVersions(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done);

    public abstract void getProtocolSignature(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done);

    // This is the first (index 0) service declared in ProtocolInfo.proto.
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    /** Non-blocking dispatch: routes by descriptor index to the abstract methods. */
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.getProtocolVersions(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.getProtocolSignature(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    /** Callback-style client stub: forwards each call over the RpcChannel. */
    public static final class Stub extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolInfoService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void getProtocolVersions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.class,
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance()));
      }

      public  void getProtocolSignature(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.class,
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    /** Synchronous client interface for the two service methods. */
    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto getProtocolVersions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto getProtocolSignature(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    /** Synchronous client stub: forwards calls over the BlockingRpcChannel. */
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto getProtocolVersions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto getProtocolSignature(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance());
      }

    }
  }
3448
  // Per-message descriptor and reflective field-accessor table for every
  // message type declared in ProtocolInfo.proto.  These are assigned by the
  // InternalDescriptorAssigner in this class's static initializer, in the
  // same order the messages appear in the .proto file.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_ProtocolVersionProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_common_ProtocolSignatureProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable;
3479
  /**
   * Returns the file descriptor for {@code ProtocolInfo.proto}, built once by
   * this class's static initializer.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Set exactly once by the InternalDescriptorAssigner during class init.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for ProtocolInfo.proto, chunked into
    // string literals by protoc.  The bytes must stay exactly as generated:
    // they are parsed at class-load time to reconstruct the descriptor.
    java.lang.String[] descriptorData = {
      "\n\022ProtocolInfo.proto\022\rhadoop.common\"3\n\037G" +
      "etProtocolVersionsRequestProto\022\020\n\010protoc" +
      "ol\030\001 \002(\t\"9\n\024ProtocolVersionProto\022\017\n\007rpcK" +
      "ind\030\001 \002(\t\022\020\n\010versions\030\002 \003(\004\"a\n GetProtoc" +
      "olVersionsResponseProto\022=\n\020protocolVersi" +
      "ons\030\001 \003(\0132#.hadoop.common.ProtocolVersio" +
      "nProto\"E\n GetProtocolSignatureRequestPro" +
      "to\022\020\n\010protocol\030\001 \002(\t\022\017\n\007rpcKind\030\002 \002(\t\"e\n" +
      "!GetProtocolSignatureResponseProto\022@\n\021pr" +
      "otocolSignature\030\001 \003(\0132%.hadoop.common.Pr",
      "otocolSignatureProto\":\n\026ProtocolSignatur" +
      "eProto\022\017\n\007version\030\001 \002(\004\022\017\n\007methods\030\002 \003(\r" +
      "2\210\002\n\023ProtocolInfoService\022v\n\023getProtocolV" +
      "ersions\022..hadoop.common.GetProtocolVersi" +
      "onsRequestProto\032/.hadoop.common.GetProto" +
      "colVersionsResponseProto\022y\n\024getProtocolS" +
      "ignature\022/.hadoop.common.GetProtocolSign" +
      "atureRequestProto\0320.hadoop.common.GetPro" +
      "tocolSignatureResponseProtoB:\n\036org.apach" +
      "e.hadoop.ipc.protobufB\022ProtocolInfoProto",
      "s\210\001\001\240\001\001"
    };
    // Invoked while the descriptor is being built: caches the file
    // descriptor, then wires up each message's descriptor and reflective
    // field-accessor table.  The getMessageTypes() indices (0..5) must match
    // the declaration order of the messages in ProtocolInfo.proto.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          // Index 0: GetProtocolVersionsRequestProto.
          internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor,
              new java.lang.String[] { "Protocol", },
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.class,
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.Builder.class);
          // Index 1: ProtocolVersionProto.
          internal_static_hadoop_common_ProtocolVersionProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_ProtocolVersionProto_descriptor,
              new java.lang.String[] { "RpcKind", "Versions", },
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.class,
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder.class);
          // Index 2: GetProtocolVersionsResponseProto.
          internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor,
              new java.lang.String[] { "ProtocolVersions", },
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.class,
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.Builder.class);
          // Index 3: GetProtocolSignatureRequestProto.
          internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor,
              new java.lang.String[] { "Protocol", "RpcKind", },
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.class,
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.Builder.class);
          // Index 4: GetProtocolSignatureResponseProto.
          internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor,
              new java.lang.String[] { "ProtocolSignature", },
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.class,
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.Builder.class);
          // Index 5: ProtocolSignatureProto.
          internal_static_hadoop_common_ProtocolSignatureProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_common_ProtocolSignatureProto_descriptor,
              new java.lang.String[] { "Version", "Methods", },
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.class,
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder.class);
          // No extensions are declared by this file.
          return null;
        }
      };
    // Parse the serialized descriptor (no dependencies) and run the assigner.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
3571
3572 // @@protoc_insertion_point(outer_class_scope)
3573 }