1
2
3
4 package org.apache.hadoop.hbase.protobuf.generated;
5
6 public final class RegionServerStatusProtos {
  // Utility holder class for generated message types; never instantiated.
  private RegionServerStatusProtos() {}
  /**
   * Registers all proto2 extensions defined in this file.
   * No extensions are declared in RegionServerStatus.proto, so this is a no-op
   * kept only to satisfy the standard generated-code entry point.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Read-only accessor interface for {@code hbase.pb.RegionServerStartupRequest},
   * implemented by both the immutable message and its Builder.
   *
   * Field numbers and required-ness below are taken from the generated message
   * class (see its writeTo/isInitialized): fields 1-3 are required, field 4 is
   * optional.
   */
  public interface RegionServerStartupRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    /**
     * {@code required uint32 port = 1;}
     *
     * @return true if the port field has been set
     */
    boolean hasPort();

    /**
     * {@code required uint32 port = 1;}
     *
     * @return the port value (0 if unset)
     */
    int getPort();

    /**
     * {@code required uint64 server_start_code = 2;}
     *
     * @return true if the server start code field has been set
     */
    boolean hasServerStartCode();

    /**
     * {@code required uint64 server_start_code = 2;}
     *
     * @return the server start code value (0 if unset)
     */
    long getServerStartCode();

    /**
     * {@code required uint64 server_current_time = 3;}
     *
     * @return true if the server current time field has been set
     */
    boolean hasServerCurrentTime();

    /**
     * {@code required uint64 server_current_time = 3;}
     *
     * @return the server current time value (0 if unset)
     */
    long getServerCurrentTime();

    /**
     * {@code optional string use_this_hostname_instead = 4;}
     *
     * @return true if the hostname-override field has been set
     */
    boolean hasUseThisHostnameInstead();

    /**
     * {@code optional string use_this_hostname_instead = 4;}
     *
     * @return the hostname override as a String ("" if unset)
     */
    java.lang.String getUseThisHostnameInstead();

    /**
     * {@code optional string use_this_hostname_instead = 4;}
     *
     * @return the hostname override as UTF-8 bytes
     */
    com.google.protobuf.ByteString
        getUseThisHostnameInsteadBytes();
  }
95
96
97
  /**
   * Protobuf type {@code hbase.pb.RegionServerStartupRequest}.
   *
   * Generated wire-format code (protoc, protobuf-java 2.5 GeneratedMessage
   * style). Do not hand-edit logic: field presence is tracked in bitField0_
   * with one bit per field (0x1=port, 0x2=serverStartCode,
   * 0x4=serverCurrentTime, 0x8=useThisHostnameInstead), and serialization
   * order must match the field numbers.
   */
  public static final class RegionServerStartupRequest extends
      com.google.protobuf.GeneratedMessage
      implements RegionServerStartupRequestOrBuilder {
    // Use RegionServerStartupRequest.newBuilder() to construct.
    private RegionServerStartupRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance only (see static block below).
    private RegionServerStartupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance; initialized in the static block at the bottom.
    private static final RegionServerStartupRequest defaultInstance;
    public static RegionServerStartupRequest getDefaultInstance() {
      return defaultInstance;
    }

    public RegionServerStartupRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via PARSER.
     *
     * Tag values are (field_number << 3) | wire_type: 8 = field 1 varint,
     * 16 = field 2 varint, 24 = field 3 varint, 34 = field 4 length-delimited.
     * Tag 0 marks end of input; unrecognized tags are preserved in
     * unknownFields. Note the switch cases do not fall through, so the
     * default case appearing before the field cases is harmless.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on malformed
     *         input; the partially parsed message is attached for callers
     *         that want best-effort data.
     */
    private RegionServerStartupRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              port_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              serverStartCode_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              serverCurrentTime_ = input.readUInt64();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000008;
              useThisHostnameInstead_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class);
    }

    // Stateless parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<RegionServerStartupRequest> PARSER =
        new com.google.protobuf.AbstractParser<RegionServerStartupRequest>() {
      public RegionServerStartupRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionServerStartupRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionServerStartupRequest> getParserForType() {
      return PARSER;
    }

    // Presence bits: 0x1=port, 0x2=serverStartCode, 0x4=serverCurrentTime,
    // 0x8=useThisHostnameInstead.
    private int bitField0_;

    // required uint32 port = 1;
    public static final int PORT_FIELD_NUMBER = 1;
    private int port_;

    /**
     * {@code required uint32 port = 1;}
     */
    public boolean hasPort() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /**
     * {@code required uint32 port = 1;}
     */
    public int getPort() {
      return port_;
    }

    // required uint64 server_start_code = 2;
    public static final int SERVER_START_CODE_FIELD_NUMBER = 2;
    private long serverStartCode_;

    /**
     * {@code required uint64 server_start_code = 2;}
     */
    public boolean hasServerStartCode() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    /**
     * {@code required uint64 server_start_code = 2;}
     */
    public long getServerStartCode() {
      return serverStartCode_;
    }

    // required uint64 server_current_time = 3;
    public static final int SERVER_CURRENT_TIME_FIELD_NUMBER = 3;
    private long serverCurrentTime_;

    /**
     * {@code required uint64 server_current_time = 3;}
     */
    public boolean hasServerCurrentTime() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }

    /**
     * {@code required uint64 server_current_time = 3;}
     */
    public long getServerCurrentTime() {
      return serverCurrentTime_;
    }

    // optional string use_this_hostname_instead = 4;
    // Holds either a String or a ByteString; lazily converted and cached in
    // whichever direction is requested (standard generated-string idiom).
    public static final int USE_THIS_HOSTNAME_INSTEAD_FIELD_NUMBER = 4;
    private java.lang.Object useThisHostnameInstead_;

    /**
     * {@code optional string use_this_hostname_instead = 4;}
     */
    public boolean hasUseThisHostnameInstead() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }

    /**
     * {@code optional string use_this_hostname_instead = 4;}
     * Decodes and caches the String form; the decoded value is cached only
     * when it is valid UTF-8.
     */
    public java.lang.String getUseThisHostnameInstead() {
      java.lang.Object ref = useThisHostnameInstead_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          useThisHostnameInstead_ = s;
        }
        return s;
      }
    }

    /**
     * {@code optional string use_this_hostname_instead = 4;}
     * Encodes and caches the ByteString form.
     */
    public com.google.protobuf.ByteString
        getUseThisHostnameInsteadBytes() {
      java.lang.Object ref = useThisHostnameInstead_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        useThisHostnameInstead_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // Resets all fields to their proto defaults.
    private void initFields() {
      port_ = 0;
      serverStartCode_ = 0L;
      serverCurrentTime_ = 0L;
      useThisHostnameInstead_ = "";
    }
    // Memoized result: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * A message is initialized when all three required fields (port,
     * server_start_code, server_current_time) are present.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasPort()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasServerStartCode()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasServerCurrentTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes set fields in field-number order, then any unknown fields. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, port_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, serverStartCode_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, serverCurrentTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getUseThisHostnameInsteadBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, port_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, serverStartCode_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, serverCurrentTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getUseThisHostnameInsteadBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Field-by-field equality: presence flags must match, and set fields must
     * compare equal; unknown fields are also compared.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) obj;

      boolean result = true;
      result = result && (hasPort() == other.hasPort());
      if (hasPort()) {
        result = result && (getPort()
            == other.getPort());
      }
      result = result && (hasServerStartCode() == other.hasServerStartCode());
      if (hasServerStartCode()) {
        result = result && (getServerStartCode()
            == other.getServerStartCode());
      }
      result = result && (hasServerCurrentTime() == other.hasServerCurrentTime());
      if (hasServerCurrentTime()) {
        result = result && (getServerCurrentTime()
            == other.getServerCurrentTime());
      }
      result = result && (hasUseThisHostnameInstead() == other.hasUseThisHostnameInstead());
      if (hasUseThisHostnameInstead()) {
        result = result && getUseThisHostnameInstead()
            .equals(other.getUseThisHostnameInstead());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means not yet computed (recomputed if hash happens to be 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPort()) {
        hash = (37 * hash) + PORT_FIELD_NUMBER;
        hash = (53 * hash) + getPort();
      }
      if (hasServerStartCode()) {
        hash = (37 * hash) + SERVER_START_CODE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getServerStartCode());
      }
      if (hasServerCurrentTime()) {
        hash = (37 * hash) + SERVER_CURRENT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getServerCurrentTime());
      }
      if (hasUseThisHostnameInstead()) {
        hash = (37 * hash) + USE_THIS_HOSTNAME_INSTEAD_FIELD_NUMBER;
        hash = (53 * hash) + getUseThisHostnameInstead().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parseFrom overloads; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }

    /**
     * Builder for {@code hbase.pb.RegionServerStartupRequest}.
     * Mirrors the message fields with the same bitField0_ presence masks.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class);
      }

      // Construct using RegionServerStartupRequest.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so nothing to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets all fields to defaults and clears their presence bits. */
      public Builder clear() {
        super.clear();
        port_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        serverStartCode_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        serverCurrentTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        useThisHostnameInstead_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if any required field is missing.
       *
       * @throws com.google.protobuf.UninitializedMessageException (unchecked)
       *         if a required field is unset
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without checking required fields; copies values and presence bits. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.port_ = port_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.serverStartCode_ = serverStartCode_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.serverCurrentTime_ = serverCurrentTime_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.useThisHostnameInstead_ = useThisHostnameInstead_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Copies every field that is set on {@code other} into this builder. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance()) return this;
        if (other.hasPort()) {
          setPort(other.getPort());
        }
        if (other.hasServerStartCode()) {
          setServerStartCode(other.getServerStartCode());
        }
        if (other.hasServerCurrentTime()) {
          setServerCurrentTime(other.getServerCurrentTime());
        }
        if (other.hasUseThisHostnameInstead()) {
          bitField0_ |= 0x00000008;
          // Copy the raw Object (String or ByteString) to avoid forcing a
          // UTF-8 conversion.
          useThisHostnameInstead_ = other.useThisHostnameInstead_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** True when all three required fields are set (not memoized in builders). */
      public final boolean isInitialized() {
        if (!hasPort()) {

          return false;
        }
        if (!hasServerStartCode()) {

          return false;
        }
        if (!hasServerCurrentTime()) {

          return false;
        }
        return true;
      }

      /**
       * Parses from the wire and merges into this builder. On parse failure
       * the partially parsed message (if any) is still merged before the
       * exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Same per-field presence masks as the message class.
      private int bitField0_;

      // required uint32 port = 1;
      private int port_ ;

      /**
       * {@code required uint32 port = 1;}
       */
      public boolean hasPort() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      /**
       * {@code required uint32 port = 1;}
       */
      public int getPort() {
        return port_;
      }

      /**
       * {@code required uint32 port = 1;}
       */
      public Builder setPort(int value) {
        bitField0_ |= 0x00000001;
        port_ = value;
        onChanged();
        return this;
      }

      /**
       * {@code required uint32 port = 1;}
       */
      public Builder clearPort() {
        bitField0_ = (bitField0_ & ~0x00000001);
        port_ = 0;
        onChanged();
        return this;
      }

      // required uint64 server_start_code = 2;
      private long serverStartCode_ ;

      /**
       * {@code required uint64 server_start_code = 2;}
       */
      public boolean hasServerStartCode() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }

      /**
       * {@code required uint64 server_start_code = 2;}
       */
      public long getServerStartCode() {
        return serverStartCode_;
      }

      /**
       * {@code required uint64 server_start_code = 2;}
       */
      public Builder setServerStartCode(long value) {
        bitField0_ |= 0x00000002;
        serverStartCode_ = value;
        onChanged();
        return this;
      }

      /**
       * {@code required uint64 server_start_code = 2;}
       */
      public Builder clearServerStartCode() {
        bitField0_ = (bitField0_ & ~0x00000002);
        serverStartCode_ = 0L;
        onChanged();
        return this;
      }

      // required uint64 server_current_time = 3;
      private long serverCurrentTime_ ;

      /**
       * {@code required uint64 server_current_time = 3;}
       */
      public boolean hasServerCurrentTime() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }

      /**
       * {@code required uint64 server_current_time = 3;}
       */
      public long getServerCurrentTime() {
        return serverCurrentTime_;
      }

      /**
       * {@code required uint64 server_current_time = 3;}
       */
      public Builder setServerCurrentTime(long value) {
        bitField0_ |= 0x00000004;
        serverCurrentTime_ = value;
        onChanged();
        return this;
      }

      /**
       * {@code required uint64 server_current_time = 3;}
       */
      public Builder clearServerCurrentTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        serverCurrentTime_ = 0L;
        onChanged();
        return this;
      }

      // optional string use_this_hostname_instead = 4;
      // Same String/ByteString dual representation as in the message class.
      private java.lang.Object useThisHostnameInstead_ = "";

      /**
       * {@code optional string use_this_hostname_instead = 4;}
       */
      public boolean hasUseThisHostnameInstead() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }

      /**
       * {@code optional string use_this_hostname_instead = 4;}
       * Note: unlike the message-side accessor, the builder caches the decoded
       * String unconditionally (generated builder idiom).
       */
      public java.lang.String getUseThisHostnameInstead() {
        java.lang.Object ref = useThisHostnameInstead_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          useThisHostnameInstead_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }

      /**
       * {@code optional string use_this_hostname_instead = 4;}
       */
      public com.google.protobuf.ByteString
          getUseThisHostnameInsteadBytes() {
        java.lang.Object ref = useThisHostnameInstead_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          useThisHostnameInstead_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      /**
       * {@code optional string use_this_hostname_instead = 4;}
       *
       * @throws NullPointerException if value is null
       */
      public Builder setUseThisHostnameInstead(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        useThisHostnameInstead_ = value;
        onChanged();
        return this;
      }

      /**
       * {@code optional string use_this_hostname_instead = 4;}
       */
      public Builder clearUseThisHostnameInstead() {
        bitField0_ = (bitField0_ & ~0x00000008);
        useThisHostnameInstead_ = getDefaultInstance().getUseThisHostnameInstead();
        onChanged();
        return this;
      }

      /**
       * {@code optional string use_this_hostname_instead = 4;}
       *
       * @throws NullPointerException if value is null
       */
      public Builder setUseThisHostnameInsteadBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        useThisHostnameInstead_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerStartupRequest)
    }

    static {
      defaultInstance = new RegionServerStartupRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupRequest)
  }
957
  /**
   * Read-only accessor interface for {@code hbase.pb.RegionServerStartupResponse},
   * implemented by both the immutable message and its Builder.
   *
   * The single field is {@code repeated NameStringPair map_entries = 1;}
   * (field number grounded in the generated message class's writeTo).
   */
  public interface RegionServerStartupResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    /**
     * {@code repeated .hbase.pb.NameStringPair map_entries = 1;}
     *
     * @return an unmodifiable view of all entries
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
        getMapEntriesList();

    /**
     * {@code repeated .hbase.pb.NameStringPair map_entries = 1;}
     *
     * @param index zero-based position in the repeated field
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index);

    /**
     * {@code repeated .hbase.pb.NameStringPair map_entries = 1;}
     *
     * @return the number of entries
     */
    int getMapEntriesCount();

    /**
     * {@code repeated .hbase.pb.NameStringPair map_entries = 1;}
     *
     * @return the entries viewed through their OrBuilder interface
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
        getMapEntriesOrBuilderList();

    /**
     * {@code repeated .hbase.pb.NameStringPair map_entries = 1;}
     *
     * @param index zero-based position in the repeated field
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
        int index);
  }
1021
1022
1023
1024 public static final class RegionServerStartupResponse extends
1025 com.google.protobuf.GeneratedMessage
1026 implements RegionServerStartupResponseOrBuilder {
1027
1028 private RegionServerStartupResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1029 super(builder);
1030 this.unknownFields = builder.getUnknownFields();
1031 }
1032 private RegionServerStartupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1033
1034 private static final RegionServerStartupResponse defaultInstance;
1035 public static RegionServerStartupResponse getDefaultInstance() {
1036 return defaultInstance;
1037 }
1038
1039 public RegionServerStartupResponse getDefaultInstanceForType() {
1040 return defaultInstance;
1041 }
1042
1043 private final com.google.protobuf.UnknownFieldSet unknownFields;
1044 @java.lang.Override
1045 public final com.google.protobuf.UnknownFieldSet
1046 getUnknownFields() {
1047 return this.unknownFields;
1048 }
1049 private RegionServerStartupResponse(
1050 com.google.protobuf.CodedInputStream input,
1051 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1052 throws com.google.protobuf.InvalidProtocolBufferException {
1053 initFields();
1054 int mutable_bitField0_ = 0;
1055 com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1056 com.google.protobuf.UnknownFieldSet.newBuilder();
1057 try {
1058 boolean done = false;
1059 while (!done) {
1060 int tag = input.readTag();
1061 switch (tag) {
1062 case 0:
1063 done = true;
1064 break;
1065 default: {
1066 if (!parseUnknownField(input, unknownFields,
1067 extensionRegistry, tag)) {
1068 done = true;
1069 }
1070 break;
1071 }
1072 case 10: {
1073 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
1074 mapEntries_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>();
1075 mutable_bitField0_ |= 0x00000001;
1076 }
1077 mapEntries_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
1078 break;
1079 }
1080 }
1081 }
1082 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1083 throw e.setUnfinishedMessage(this);
1084 } catch (java.io.IOException e) {
1085 throw new com.google.protobuf.InvalidProtocolBufferException(
1086 e.getMessage()).setUnfinishedMessage(this);
1087 } finally {
1088 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
1089 mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_);
1090 }
1091 this.unknownFields = unknownFields.build();
1092 makeExtensionsImmutable();
1093 }
1094 }
1095 public static final com.google.protobuf.Descriptors.Descriptor
1096 getDescriptor() {
1097 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor;
1098 }
1099
1100 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1101 internalGetFieldAccessorTable() {
1102 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable
1103 .ensureFieldAccessorsInitialized(
1104 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class);
1105 }
1106
1107 public static com.google.protobuf.Parser<RegionServerStartupResponse> PARSER =
1108 new com.google.protobuf.AbstractParser<RegionServerStartupResponse>() {
1109 public RegionServerStartupResponse parsePartialFrom(
1110 com.google.protobuf.CodedInputStream input,
1111 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1112 throws com.google.protobuf.InvalidProtocolBufferException {
1113 return new RegionServerStartupResponse(input, extensionRegistry);
1114 }
1115 };
1116
1117 @java.lang.Override
1118 public com.google.protobuf.Parser<RegionServerStartupResponse> getParserForType() {
1119 return PARSER;
1120 }
1121
1122
1123 public static final int MAP_ENTRIES_FIELD_NUMBER = 1;
1124 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> mapEntries_;
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getMapEntriesList() {
1136 return mapEntries_;
1137 }
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
1149 getMapEntriesOrBuilderList() {
1150 return mapEntries_;
1151 }
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162 public int getMapEntriesCount() {
1163 return mapEntries_.size();
1164 }
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) {
1176 return mapEntries_.get(index);
1177 }
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
1189 int index) {
1190 return mapEntries_.get(index);
1191 }
1192
1193 private void initFields() {
1194 mapEntries_ = java.util.Collections.emptyList();
1195 }
1196 private byte memoizedIsInitialized = -1;
1197 public final boolean isInitialized() {
1198 byte isInitialized = memoizedIsInitialized;
1199 if (isInitialized != -1) return isInitialized == 1;
1200
1201 for (int i = 0; i < getMapEntriesCount(); i++) {
1202 if (!getMapEntries(i).isInitialized()) {
1203 memoizedIsInitialized = 0;
1204 return false;
1205 }
1206 }
1207 memoizedIsInitialized = 1;
1208 return true;
1209 }
1210
1211 public void writeTo(com.google.protobuf.CodedOutputStream output)
1212 throws java.io.IOException {
1213 getSerializedSize();
1214 for (int i = 0; i < mapEntries_.size(); i++) {
1215 output.writeMessage(1, mapEntries_.get(i));
1216 }
1217 getUnknownFields().writeTo(output);
1218 }
1219
    // Cached wire size; -1 means "not yet computed". Benign race: recomputation
    // always yields the same value for an immutable message.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum of tag+length+payload for each repeated element, plus unknown fields.
      size = 0;
      for (int i = 0; i < mapEntries_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, mapEntries_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1234
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegates to GeneratedMessage, which substitutes
    // a protobuf-encoded proxy so this class's fields are never Java-serialized directly.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1241
    // Value equality over the map_entries list and the unknown-field set.
    // Non-RegionServerStartupResponse arguments fall through to super.equals
    // (identity for messages), per the generator's standard pattern.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) obj;

      boolean result = true;
      result = result && getMapEntriesList()
          .equals(other.getMapEntriesList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1259
    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0 is
    // simply recomputed — benign). Consistent with equals(): mixes the descriptor,
    // the map_entries field when present, and the unknown fields.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getMapEntriesCount() > 0) {
        hash = (37 * hash) + MAP_ENTRIES_FIELD_NUMBER;
        hash = (53 * hash) + getMapEntriesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1276
    // Standard generated parse entry points: all delegate to PARSER. The
    // ByteString/byte[] overloads throw InvalidProtocolBufferException on
    // malformed input; the stream overloads additionally surface IOException.
    // parseDelimitedFrom expects a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1329
    // Builder factories: newBuilder() for an empty builder, newBuilder(prototype)
    // pre-populated from an existing message, toBuilder() from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to {@code parent} so nested-field
    // changes propagate invalidation upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1343
1344
1345
    /**
     * Generated mutable builder for {@code RegionServerStartupResponse}. Follows
     * the protobuf 2.5 builder pattern for a single repeated message field
     * ({@code map_entries}): the field lives either in the plain list
     * {@code mapEntries_} (guarded by bit 0 of {@code bitField0_}, which marks the
     * list as privately owned and mutable) or, once nested builders are requested,
     * in {@code mapEntriesBuilder_}. Exactly one of the two representations is
     * active at any time; every accessor branches on {@code mapEntriesBuilder_ == null}.
     * Not thread-safe (standard for generated builders).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class);
      }

      // Construct via Builder.create() / RegionServerStartupResponse.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly materializes nested field builders when the runtime requests it
      // (alwaysUseFieldBuilders is a debugging/consistency switch in the library).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getMapEntriesFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets map_entries to empty in whichever representation is active.
      public Builder clear() {
        super.clear();
        if (mapEntriesBuilder_ == null) {
          mapEntries_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          mapEntriesBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance();
      }

      // build() enforces required-field initialization of nested messages;
      // buildPartial() below does not.
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Snapshots the builder state into an immutable message. If this builder
      // still owns a mutable list, it is frozen (wrapped unmodifiable) and
      // ownership transfers to the message — bit 0 is cleared so a later mutation
      // on this builder copies the list first (see ensureMapEntriesIsMutable).
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(this);
        int from_bitField0_ = bitField0_;
        if (mapEntriesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.mapEntries_ = mapEntries_;
        } else {
          result.mapEntries_ = mapEntriesBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Appends other's map_entries to ours (repeated-field merge semantics).
      // When our side is empty we alias other's immutable list instead of copying;
      // bit 0 stays clear so any later mutation copies first. The builder-backed
      // branch mirrors this by disposing and lazily recreating the field builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()) return this;
        if (mapEntriesBuilder_ == null) {
          if (!other.mapEntries_.isEmpty()) {
            if (mapEntries_.isEmpty()) {
              mapEntries_ = other.mapEntries_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureMapEntriesIsMutable();
              mapEntries_.addAll(other.mapEntries_);
            }
            onChanged();
          }
        } else {
          if (!other.mapEntries_.isEmpty()) {
            if (mapEntriesBuilder_.isEmpty()) {
              mapEntriesBuilder_.dispose();
              mapEntriesBuilder_ = null;
              mapEntries_ = other.mapEntries_;
              bitField0_ = (bitField0_ & ~0x00000001);
              mapEntriesBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getMapEntriesFieldBuilder() : null;
            } else {
              mapEntriesBuilder_.addAllMessages(other.mapEntries_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // True iff every map_entries element has its required fields set.
      public final boolean isInitialized() {
        for (int i = 0; i < getMapEntriesCount(); i++) {
          if (!getMapEntries(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }

      // Parses from the wire and merges into this builder. On a parse error the
      // partially-parsed message (if any) is still merged before rethrowing, so
      // the builder retains whatever was read.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // Bit 0 of bitField0_ set => mapEntries_ is a private ArrayList we may
      // mutate in place; clear => it may alias an immutable/shared list.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> mapEntries_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: clones the list into a fresh ArrayList the first
      // time a mutation is attempted after the list was shared or frozen.
      private void ensureMapEntriesIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          mapEntries_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(mapEntries_);
          bitField0_ |= 0x00000001;
        }
      }

      // Non-null once nested builders have been requested; from then on it, not
      // mapEntries_, is the source of truth for the field.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_;

      // Read-only snapshot view of the current elements.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getMapEntriesList() {
        if (mapEntriesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(mapEntries_);
        } else {
          return mapEntriesBuilder_.getMessageList();
        }
      }

      public int getMapEntriesCount() {
        if (mapEntriesBuilder_ == null) {
          return mapEntries_.size();
        } else {
          return mapEntriesBuilder_.getCount();
        }
      }

      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) {
        if (mapEntriesBuilder_ == null) {
          return mapEntries_.get(index);
        } else {
          return mapEntriesBuilder_.getMessage(index);
        }
      }

      // Replaces the element at {@code index}; null is rejected (proto fields
      // are null-hostile).
      public Builder setMapEntries(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
        if (mapEntriesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMapEntriesIsMutable();
          mapEntries_.set(index, value);
          onChanged();
        } else {
          mapEntriesBuilder_.setMessage(index, value);
        }
        return this;
      }

      public Builder setMapEntries(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
        if (mapEntriesBuilder_ == null) {
          ensureMapEntriesIsMutable();
          mapEntries_.set(index, builderForValue.build());
          onChanged();
        } else {
          mapEntriesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }

      // Appends a single element.
      public Builder addMapEntries(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
        if (mapEntriesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMapEntriesIsMutable();
          mapEntries_.add(value);
          onChanged();
        } else {
          mapEntriesBuilder_.addMessage(value);
        }
        return this;
      }

      // Inserts at {@code index}, shifting subsequent elements.
      public Builder addMapEntries(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
        if (mapEntriesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMapEntriesIsMutable();
          mapEntries_.add(index, value);
          onChanged();
        } else {
          mapEntriesBuilder_.addMessage(index, value);
        }
        return this;
      }

      public Builder addMapEntries(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
        if (mapEntriesBuilder_ == null) {
          ensureMapEntriesIsMutable();
          mapEntries_.add(builderForValue.build());
          onChanged();
        } else {
          mapEntriesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }

      public Builder addMapEntries(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
        if (mapEntriesBuilder_ == null) {
          ensureMapEntriesIsMutable();
          mapEntries_.add(index, builderForValue.build());
          onChanged();
        } else {
          mapEntriesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }

      // Bulk append; super.addAll performs the null checks and efficient copy.
      public Builder addAllMapEntries(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
        if (mapEntriesBuilder_ == null) {
          ensureMapEntriesIsMutable();
          super.addAll(values, mapEntries_);
          onChanged();
        } else {
          mapEntriesBuilder_.addAllMessages(values);
        }
        return this;
      }

      public Builder clearMapEntries() {
        if (mapEntriesBuilder_ == null) {
          mapEntries_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          mapEntriesBuilder_.clear();
        }
        return this;
      }

      public Builder removeMapEntries(int index) {
        if (mapEntriesBuilder_ == null) {
          ensureMapEntriesIsMutable();
          mapEntries_.remove(index);
          onChanged();
        } else {
          mapEntriesBuilder_.remove(index);
        }
        return this;
      }

      // Returns a live nested builder for in-place editing of element {@code index}.
      // First use switches this Builder to the field-builder representation.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder(
          int index) {
        return getMapEntriesFieldBuilder().getBuilder(index);
      }

      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
          int index) {
        if (mapEntriesBuilder_ == null) {
          return mapEntries_.get(index); } else {
          return mapEntriesBuilder_.getMessageOrBuilder(index);
        }
      }

      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
           getMapEntriesOrBuilderList() {
        if (mapEntriesBuilder_ != null) {
          return mapEntriesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(mapEntries_);
        }
      }

      // Appends a new default-valued element and returns its builder.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() {
        return getMapEntriesFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
      }

      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder(
          int index) {
        return getMapEntriesFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
      }

      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
           getMapEntriesBuilderList() {
        return getMapEntriesFieldBuilder().getBuilderList();
      }
      // Lazily converts to the RepeatedFieldBuilder representation: the builder
      // takes over the current list (mutability flag passed along) and mapEntries_
      // is nulled so the old representation can never be used again by mistake.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
          getMapEntriesFieldBuilder() {
        if (mapEntriesBuilder_ == null) {
          mapEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
                  mapEntries_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          mapEntries_ = null;
        }
        return mapEntriesBuilder_;
      }

      // end of Builder for RegionServerStartupResponse
    }
1866
    // Class initializer: builds the shared default (empty) instance returned by
    // getDefaultInstance(). Runs once, on first class use.
    static {
      defaultInstance = new RegionServerStartupResponse(true);
      defaultInstance.initFields();
    }
1871
1872
1873 }
1874
  /**
   * Read-only accessor interface for {@code hbase.pb.RegionServerReportRequest},
   * implemented by both the immutable message and its Builder. The request
   * carries the reporting server's identity ({@code server}) and its load
   * snapshot ({@code load}); from the generated accessors, {@code server} has a
   * has-bit and per the message's isInitialized() is required, while {@code load}
   * is optional.
   */
  public interface RegionServerReportRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.ServerName server = 1;
    boolean hasServer();

    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();

    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    // optional .hbase.pb.ServerLoad load = 2; presumably the server's current
    // load metrics — confirm semantics against RegionServerStatus.proto.
    boolean hasLoad();

    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad();

    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder();
  }
1918
1919
1920
1921 public static final class RegionServerReportRequest extends
1922 com.google.protobuf.GeneratedMessage
1923 implements RegionServerReportRequestOrBuilder {
1924
    // Builder-path constructor: copies the builder's unknown fields; message
    // fields are filled in by buildPartial().
    private RegionServerReportRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to construct the shared default instance (see the class's
    // static initializer); noInit is a disambiguating marker, not a flag.
    private RegionServerReportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final RegionServerReportRequest defaultInstance;
    public static RegionServerReportRequest getDefaultInstance() {
      return defaultInstance;
    }

    public RegionServerReportRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields present on the wire but absent from this schema version, preserved
    // for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor (invoked by PARSER). Reads tags until
    // end-of-message: tag 10 = field 1 (server, length-delimited ServerName),
    // tag 18 = field 2 (load, length-delimited ServerLoad). A repeated occurrence
    // of a singular field is merged into the previous value via toBuilder()/
    // mergeFrom, per protobuf last-one-wins-with-merge semantics. The default
    // arm preserves unrecognized fields; its placement before the case labels is
    // legal Java and is how the generator emits it.
    private RegionServerReportRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = server_.toBuilder();
              }
              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(server_);
                server_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = load_.toBuilder();
              }
              load_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(load_);
                load_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can recover it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection plumbing: descriptor and field-accessor table registered by the
    // enclosing file's descriptor initializer.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class);
    }
2018
    // Shared stateless parser; each parse constructs a message via the private
    // parsing constructor. Non-final here because this is protobuf 2.5-era
    // generated code — treat as effectively final.
    public static com.google.protobuf.Parser<RegionServerReportRequest> PARSER =
        new com.google.protobuf.AbstractParser<RegionServerReportRequest>() {
      public RegionServerReportRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionServerReportRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionServerReportRequest> getParserForType() {
      return PARSER;
    }
2033
    // Presence bits: bit 0 = server, bit 1 = load (see hasServer/hasLoad).
    private int bitField0_;

    // required .hbase.pb.ServerName server = 1;
    public static final int SERVER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;

    public boolean hasServer() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    // Returns the default ServerName instance (set by initFields) when unset,
    // never null.
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
      return server_;
    }

    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
      return server_;
    }
2056
2057
    // optional .hbase.pb.ServerLoad load = 2;
    public static final int LOAD_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad load_;

    public boolean hasLoad() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    // Returns the default ServerLoad instance (set by initFields) when unset,
    // never null — check hasLoad() to distinguish.
    public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad() {
      return load_;
    }

    public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder() {
      return load_;
    }
2090
    // Resets both message fields to their type default instances.
    private void initFields() {
      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      load_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance();
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. Benign race by design.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // server is required: must be present and itself initialized.
      if (!hasServer()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // load is optional: validated only when present.
      if (hasLoad()) {
        if (!getLoad().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
2117
    // Serializes set fields in field-number order (server=1, load=2), then any
    // preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // forces size memoization before writing (generator convention)
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, server_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, load_);
      }
      getUnknownFields().writeTo(output);
    }
2129
    // Cached wire size; -1 means "not yet computed". Benign race: the value is
    // deterministic for an immutable message.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, server_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, load_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2148
    private static final long serialVersionUID = 0L;
    // Java serialization hook: GeneratedMessage substitutes a protobuf-encoded
    // proxy so fields are never Java-serialized directly.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2155
    // Value equality: matching presence bits, equal field values where present,
    // and equal unknown-field sets.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) obj;

      boolean result = true;
      result = result && (hasServer() == other.hasServer());
      if (hasServer()) {
        result = result && getServer()
            .equals(other.getServer());
      }
      result = result && (hasLoad() == other.hasLoad());
      if (hasLoad()) {
        result = result && getLoad()
            .equals(other.getLoad());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
2181
    // Cached hash; 0 means "not yet computed" (an actual hash of 0 is just
    // recomputed — benign). Consistent with equals(): mixes descriptor, each
    // present field keyed by its field number, and the unknown fields.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasServer()) {
        hash = (37 * hash) + SERVER_FIELD_NUMBER;
        hash = (53 * hash) + getServer().hashCode();
      }
      if (hasLoad()) {
        hash = (37 * hash) + LOAD_FIELD_NUMBER;
        hash = (53 * hash) + getLoad().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2202
    // Standard generated parse entry points; all delegate to PARSER (see the
    // PARSER field above). parseDelimitedFrom expects a varint length prefix.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2255
    // Builder factory methods. newBuilder(prototype) seeds a fresh builder
    // with a copy of the prototype's fields; toBuilder() is the instance
    // form of the same operation.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2269
2270
2271
    /**
     * Builder for the {@code RegionServerReportRequest} message: a
     * {@code server} (ServerName, field presence bit 0x1) and a {@code load}
     * (ServerLoad, field presence bit 0x2), both tracked in {@code bitField0_}.
     * Generated by the protocol buffer compiler — do not hand-edit the logic;
     * regenerate from the .proto instead.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class);
      }

      // Construct via RegionServerReportRequest.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builders when the runtime
      // flags require it; otherwise they are created lazily on first use
      // (see getServerFieldBuilder()/getLoadFieldBuilder()).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getServerFieldBuilder();
          getLoadFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets both fields to their defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (loadBuilder_ == null) {
          load_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance();
        } else {
          loadBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance();
      }

      // build() enforces initialization (required fields present); use
      // buildPartial() to skip that check.
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder's state into a new message. Field values are
      // copied unconditionally; only the presence bits are gated on
      // bitField0_, which is the standard generated-code layout.
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (serverBuilder_ == null) {
          result.server_ = server_;
        } else {
          result.server_ = serverBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (loadBuilder_ == null) {
          result.load_ = load_;
        } else {
          result.load_ = loadBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges set fields of `other` into this builder; unknown fields are
      // merged as well. Merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance()) return this;
        if (other.hasServer()) {
          mergeServer(other.getServer());
        }
        if (other.hasLoad()) {
          mergeLoad(other.getLoad());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // server is required (and must itself be initialized); load is
      // optional but, when present, must be initialized.
      public final boolean isInitialized() {
        if (!hasServer()) {
          
          return false;
        }
        if (!getServer().isInitialized()) {
          
          return false;
        }
        if (hasLoad()) {
          if (!getLoad().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      // Parses from the wire and merges the result, preserving whatever was
      // parsed before a failure (the partially parsed message is merged in
      // the finally block before the exception propagates).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // ---- field: server (bit 0x00000001) ----
      // Invariant: while serverBuilder_ is null, server_ holds the value;
      // once getServerFieldBuilder() runs, serverBuilder_ owns the value and
      // server_ is nulled out.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;

      public boolean hasServer() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
        if (serverBuilder_ == null) {
          return server_;
        } else {
          return serverBuilder_.getMessage();
        }
      }

      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          server_ = value;
          onChanged();
        } else {
          serverBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      public Builder setServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverBuilder_ == null) {
          server_ = builderForValue.build();
          onChanged();
        } else {
          serverBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      // Merges into the existing value when one was already set and is not
      // the shared default instance; otherwise simply adopts `value`.
      public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            server_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
          } else {
            server_ = value;
          }
          onChanged();
        } else {
          serverBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      public Builder clearServer() {
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      // Marks the field present and returns a mutable nested builder.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getServerFieldBuilder().getBuilder();
      }

      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
        if (serverBuilder_ != null) {
          return serverBuilder_.getMessageOrBuilder();
        } else {
          return server_;
        }
      }

      // Lazily creates the SingleFieldBuilder; ownership of the current
      // value is transferred to it and server_ is nulled out afterwards.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getServerFieldBuilder() {
        if (serverBuilder_ == null) {
          serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  server_,
                  getParentForChildren(),
                  isClean());
          server_ = null;
        }
        return serverBuilder_;
      }

      // ---- field: load (bit 0x00000002) ----
      // Same value/builder handoff invariant as the server field above.
      private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad load_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder> loadBuilder_;

      public boolean hasLoad() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad() {
        if (loadBuilder_ == null) {
          return load_;
        } else {
          return loadBuilder_.getMessage();
        }
      }

      public Builder setLoad(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad value) {
        if (loadBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          load_ = value;
          onChanged();
        } else {
          loadBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }

      public Builder setLoad(
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder builderForValue) {
        if (loadBuilder_ == null) {
          load_ = builderForValue.build();
          onChanged();
        } else {
          loadBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }

      // Merges into the existing value when already set and not the shared
      // default instance; otherwise adopts `value`.
      public Builder mergeLoad(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad value) {
        if (loadBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              load_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance()) {
            load_ =
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.newBuilder(load_).mergeFrom(value).buildPartial();
          } else {
            load_ = value;
          }
          onChanged();
        } else {
          loadBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }

      public Builder clearLoad() {
        if (loadBuilder_ == null) {
          load_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance();
          onChanged();
        } else {
          loadBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      // Marks the field present and returns a mutable nested builder.
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder getLoadBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getLoadFieldBuilder().getBuilder();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder() {
        if (loadBuilder_ != null) {
          return loadBuilder_.getMessageOrBuilder();
        } else {
          return load_;
        }
      }

      // Lazily creates the SingleFieldBuilder; ownership of the current
      // value is transferred to it and load_ is nulled out afterwards.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder> 
          getLoadFieldBuilder() {
        if (loadBuilder_ == null) {
          loadBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder>(
                  load_,
                  getParentForChildren(),
                  isClean());
          load_ = null;
        }
        return loadBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerReportRequest)
    }
2700
    // Eagerly constructs the shared immutable default instance; the boolean
    // ctor skips normal builder initialization, initFields() sets defaults.
    static {
      defaultInstance = new RegionServerReportRequest(true);
      defaultInstance.initFields();
    }
2705
2706
2707 }
2708
  /**
   * Accessor interface for {@code RegionServerReportResponse}. The message
   * declares no fields, so nothing is added beyond MessageOrBuilder.
   */
  public interface RegionServerReportResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
2712
2713
2714
  /**
   * Protobuf message {@code RegionServerReportResponse}: an empty response
   * carrying no declared fields, only unknown fields preserved from the
   * wire. Generated by the protocol buffer compiler — do not hand-edit;
   * regenerate from the .proto instead.
   */
  public static final class RegionServerReportResponse extends
      com.google.protobuf.GeneratedMessage
      implements RegionServerReportResponseOrBuilder {
    // Use RegionServerReportResponse.newBuilder() to construct.
    private RegionServerReportResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private RegionServerReportResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final RegionServerReportResponse defaultInstance;
    public static RegionServerReportResponse getDefaultInstance() {
      return defaultInstance;
    }

    public RegionServerReportResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: since the message has no declared
    // fields, every non-zero tag is routed to parseUnknownField; tag 0
    // marks end of input.
    private RegionServerReportResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever was parsed, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class);
    }

    // NOTE(review): PARSER is a mutable public static field — a known wart
    // of protoc 2.5-era generated code (made final in later protoc
    // releases). Do not hand-fix; it would be clobbered on regeneration.
    public static com.google.protobuf.Parser<RegionServerReportResponse> PARSER =
        new com.google.protobuf.AbstractParser<RegionServerReportResponse>() {
      public RegionServerReportResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionServerReportResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionServerReportResponse> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // Memoized initialization check: -1 unknown, 1 true, 0 false. With no
    // required fields this is always true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Serialized size is just the unknown fields; memoized after first call.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // With no declared fields, equality reduces to unknown-field equality.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash mixes the descriptor and unknown fields; memoized (0 = not yet
    // computed).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points: all delegate to PARSER (see the note on
    // RegionServerReportRequest for the overload semantics).
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }

    /**
     * Builder for the (field-less) {@code RegionServerReportResponse}
     * message; only unknown fields are carried through.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class);
      }

      // Construct via RegionServerReportResponse.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so nothing to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Only unknown fields need merging for a field-less message.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from the wire and merges; partially parsed data is merged in
      // the finally block before any exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerReportResponse)
    }

    // Eagerly constructs the shared immutable default instance.
    static {
      defaultInstance = new RegionServerReportResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerReportResponse)
  }
3046
  /**
   * Accessor interface for {@code ReportRSFatalErrorRequest}: a
   * {@code server} ServerName (field 1) and an {@code errorMessage} string
   * (field 2).
   */
  public interface ReportRSFatalErrorRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // ---- field: server (ServerName, field number 1) ----

    // Returns true if the server field was explicitly set.
    boolean hasServer();

    // Returns the server value (the default instance when unset).
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();

    // Lightweight read-only view of the server field.
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    // ---- field: errorMessage (string, field number 2) ----

    // Returns true if the errorMessage field was explicitly set.
    boolean hasErrorMessage();

    // Returns the error message as a String.
    java.lang.String getErrorMessage();

    // Returns the error message as raw UTF-8 bytes.
    com.google.protobuf.ByteString
        getErrorMessageBytes();
  }
3103
3104
3105
  /**
   * Protobuf message {@code ReportRSFatalErrorRequest} (server + error
   * message; the rest of the class continues below this excerpt).
   * Generated by the protocol buffer compiler — do not hand-edit.
   */
  public static final class ReportRSFatalErrorRequest extends
      com.google.protobuf.GeneratedMessage
      implements ReportRSFatalErrorRequestOrBuilder {
    // Use ReportRSFatalErrorRequest.newBuilder() to construct.
    private ReportRSFatalErrorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor for the shared default instance: skips builder setup and
    // installs an empty unknown-field set.
    private ReportRSFatalErrorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3115
    // Shared immutable default instance, assigned in the class's static
    // initializer (below this excerpt).
    private static final ReportRSFatalErrorRequest defaultInstance;
    public static ReportRSFatalErrorRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ReportRSFatalErrorRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unknown fields captured from the wire during parsing.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Tag 10 = field 1 (server,
    // length-delimited ServerName); tag 18 = field 2 (errorMessage bytes);
    // tag 0 = end of input; anything else goes to parseUnknownField.
    // Note: the `default` label textually precedes `case 10`/`case 18` —
    // this is standard protoc output and is valid Java (switch matches by
    // value, not label order).
    private ReportRSFatalErrorRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // If server was already seen, merge the new occurrence into
              // the previous value (proto2 repeated-occurrence semantics).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = server_.toBuilder();
              }
              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(server_);
                server_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              errorMessage_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever was parsed, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for {@code hbase.pb.ReportRSFatalErrorRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor;
    }

    /** Wires the reflective field accessors to this class and its Builder. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class);
    }

    // Shared parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ReportRSFatalErrorRequest> PARSER =
        new com.google.protobuf.AbstractParser<ReportRSFatalErrorRequest>() {
      public ReportRSFatalErrorRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReportRSFatalErrorRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReportRSFatalErrorRequest> getParserForType() {
      return PARSER;
    }
3210
    // Presence bit mask: bit 0 = server, bit 1 = error_message
    // (both are required per isInitialized()).
    private int bitField0_;

    // required .hbase.pb.ServerName server = 1;
    public static final int SERVER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;

    /** Returns true if the {@code server} field was explicitly set. */
    public boolean hasServer() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /** Returns the {@code server} field, or its default instance when unset. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
      return server_;
    }

    /** Returns the {@code server} field through its read-only interface. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
      return server_;
    }

    // required string error_message = 2;
    public static final int ERROR_MESSAGE_FIELD_NUMBER = 2;
    // Holds either a java.lang.String or a ByteString; decoded lazily.
    private java.lang.Object errorMessage_;

    /** Returns true if the {@code error_message} field was explicitly set. */
    public boolean hasErrorMessage() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    /**
     * Returns {@code error_message} as a String, decoding from UTF-8 bytes on
     * first access. The decoded String is cached back into the field only
     * when the bytes are valid UTF-8; invalid bytes are re-decoded each call.
     */
    public java.lang.String getErrorMessage() {
      java.lang.Object ref = errorMessage_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          errorMessage_ = s;
        }
        return s;
      }
    }

    /**
     * Returns {@code error_message} as UTF-8 bytes, encoding and caching the
     * byte form when the field currently holds a String.
     */
    public com.google.protobuf.ByteString
        getErrorMessageBytes() {
      java.lang.Object ref = errorMessage_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        errorMessage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
3300
    /** Resets all fields to their proto defaults. */
    private void initFields() {
      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      errorMessage_ = "";
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * Returns true when both required fields ({@code server} and
     * {@code error_message}) are present and the nested {@code server}
     * message is itself fully initialized. Result is memoized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasServer()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasErrorMessage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes the set fields, then any unknown fields, to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Computing the size first also populates per-message cached sizes
      // that CodedOutputStream relies on.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, server_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getErrorMessageBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /** Computes (and caches) the serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, server_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getErrorMessageBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
3356
    private static final long serialVersionUID = 0L;
    /** Java-serialization hook; delegates to GeneratedMessage's proxy form. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
3363
3364 @java.lang.Override
3365 public boolean equals(final java.lang.Object obj) {
3366 if (obj == this) {
3367 return true;
3368 }
3369 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)) {
3370 return super.equals(obj);
3371 }
3372 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) obj;
3373
3374 boolean result = true;
3375 result = result && (hasServer() == other.hasServer());
3376 if (hasServer()) {
3377 result = result && getServer()
3378 .equals(other.getServer());
3379 }
3380 result = result && (hasErrorMessage() == other.hasErrorMessage());
3381 if (hasErrorMessage()) {
3382 result = result && getErrorMessage()
3383 .equals(other.getErrorMessage());
3384 }
3385 result = result &&
3386 getUnknownFields().equals(other.getUnknownFields());
3387 return result;
3388 }
3389
3390 private int memoizedHashCode = 0;
3391 @java.lang.Override
3392 public int hashCode() {
3393 if (memoizedHashCode != 0) {
3394 return memoizedHashCode;
3395 }
3396 int hash = 41;
3397 hash = (19 * hash) + getDescriptorForType().hashCode();
3398 if (hasServer()) {
3399 hash = (37 * hash) + SERVER_FIELD_NUMBER;
3400 hash = (53 * hash) + getServer().hashCode();
3401 }
3402 if (hasErrorMessage()) {
3403 hash = (37 * hash) + ERROR_MESSAGE_FIELD_NUMBER;
3404 hash = (53 * hash) + getErrorMessage().hashCode();
3405 }
3406 hash = (29 * hash) + getUnknownFields().hashCode();
3407 memoizedHashCode = hash;
3408 return hash;
3409 }
3410
    // Static parsing entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a length prefix first, for streams of messages.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
3463
    /** Creates a new empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Creates a builder pre-populated with this message's fields. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
3477
3478
3479
    /**
     * Builder for {@code hbase.pb.ReportRSFatalErrorRequest}.
     * Mutable companion of the immutable message; tracks field presence in
     * {@code bitField0_} (bit 0 = server, bit 1 = error_message).
     * Not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestOrBuilder {
      /** Returns the descriptor for the message type this builder produces. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class);
      }

      // Construct using ReportRSFatalErrorRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-field builders when the runtime requires it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getServerFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        errorMessage_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      /** Returns an independent copy of this builder's current state. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if any required field is missing.
       * @throws com.google.protobuf.UninitializedMessageException (unchecked)
       *         when a required field was not set
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds the message without checking that required fields are set. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (serverBuilder_ == null) {
          result.server_ = server_;
        } else {
          result.server_ = serverBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.errorMessage_ = errorMessage_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      /** Generic merge: dispatches to the typed overload when possible. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges every field that is set in {@code other} into this builder. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance()) return this;
        if (other.hasServer()) {
          mergeServer(other.getServer());
        }
        if (other.hasErrorMessage()) {
          bitField0_ |= 0x00000002;
          // Share other's String-or-ByteString value directly; both are immutable.
          errorMessage_ = other.errorMessage_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** True when both required fields are set and {@code server} is valid. */
      public final boolean isInitialized() {
        if (!hasServer()) {

          return false;
        }
        if (!hasErrorMessage()) {

          return false;
        }
        if (!getServer().isInitialized()) {

          return false;
        }
        return true;
      }

      /**
       * Parses from {@code input} and merges into this builder. On parse
       * failure, any fields read before the error are still merged (see the
       * finally block) so partial progress is not lost.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the builder (same layout as the message's).
      private int bitField0_;

      // required .hbase.pb.ServerName server = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      // Lazily-created nested builder; while non-null it owns the field value
      // and server_ is ignored (see getServerFieldBuilder()).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;

      /** Returns true if the {@code server} field has been set. */
      public boolean hasServer() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      /** Returns the current {@code server} value (default instance if unset). */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
        if (serverBuilder_ == null) {
          return server_;
        } else {
          return serverBuilder_.getMessage();
        }
      }

      /** Sets {@code server}; rejects null. */
      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          server_ = value;
          onChanged();
        } else {
          serverBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      /** Sets {@code server} from a nested builder's current state. */
      public Builder setServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverBuilder_ == null) {
          server_ = builderForValue.build();
          onChanged();
        } else {
          serverBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      /**
       * Merges {@code value} into {@code server}: field-wise merge when a
       * non-default value is already present, plain assignment otherwise.
       */
      public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            server_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
          } else {
            server_ = value;
          }
          onChanged();
        } else {
          serverBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      /** Clears {@code server} back to its default and drops its presence bit. */
      public Builder clearServer() {
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      /** Returns a mutable builder for {@code server}, marking it present. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getServerFieldBuilder().getBuilder();
      }

      /** Returns a read-only view of {@code server} without forcing a builder. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
        if (serverBuilder_ != null) {
          return serverBuilder_.getMessageOrBuilder();
        } else {
          return server_;
        }
      }

      /**
       * Lazily creates the nested field builder for {@code server}; once
       * created it takes ownership of the current value and {@code server_}
       * is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
          getServerFieldBuilder() {
        if (serverBuilder_ == null) {
          serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  server_,
                  getParentForChildren(),
                  isClean());
          server_ = null;
        }
        return serverBuilder_;
      }

      // required string error_message = 2;
      // Holds either a java.lang.String or a ByteString, like the message field.
      private java.lang.Object errorMessage_ = "";

      /** Returns true if {@code error_message} has been set. */
      public boolean hasErrorMessage() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }

      /**
       * Returns {@code error_message} as a String, decoding and caching from
       * bytes on first access. Unlike the message accessor, the builder
       * caches the decoded String unconditionally.
       */
      public java.lang.String getErrorMessage() {
        java.lang.Object ref = errorMessage_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          errorMessage_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }

      /** Returns {@code error_message} as UTF-8 bytes, encoding and caching. */
      public com.google.protobuf.ByteString
          getErrorMessageBytes() {
        java.lang.Object ref = errorMessage_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          errorMessage_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      /** Sets {@code error_message}; rejects null. */
      public Builder setErrorMessage(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        errorMessage_ = value;
        onChanged();
        return this;
      }

      /** Clears {@code error_message} back to its default (""). */
      public Builder clearErrorMessage() {
        bitField0_ = (bitField0_ & ~0x00000002);
        errorMessage_ = getDefaultInstance().getErrorMessage();
        onChanged();
        return this;
      }

      /** Sets {@code error_message} from pre-encoded bytes; rejects null. */
      public Builder setErrorMessageBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        errorMessage_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.ReportRSFatalErrorRequest)
    }
3880
    // Create and default-initialize the singleton default instance.
    static {
      defaultInstance = new ReportRSFatalErrorRequest(true);
      defaultInstance.initFields();
    }
3885
3886
3887 }
3888
  /**
   * Message-or-builder view of {@code hbase.pb.ReportRSFatalErrorResponse}.
   * The response message declares no fields, so no accessors are needed here.
   */
  public interface ReportRSFatalErrorResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
3892
3893
3894
3895 public static final class ReportRSFatalErrorResponse extends
3896 com.google.protobuf.GeneratedMessage
3897 implements ReportRSFatalErrorResponseOrBuilder {
3898
    // Use ReportRSFatalErrorResponse.newBuilder() to construct.
    private ReportRSFatalErrorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    /** Constructor used only for {@code defaultInstance}; empty unknown fields. */
    private ReportRSFatalErrorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; assigned in the class's static initializer.
    private static final ReportRSFatalErrorResponse defaultInstance;
    /** Returns the shared immutable default instance of this message. */
    public static ReportRSFatalErrorResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ReportRSFatalErrorResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Wire fields not declared in the schema; preserved for re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a serialized {@code ReportRSFatalErrorResponse}. The message has
     * no declared fields, so every non-zero tag is routed to the unknown-field
     * set; parsing ends at tag 0 (end of stream).
     */
    private ReportRSFatalErrorResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:  // end of stream
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture collected unknown fields, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for {@code hbase.pb.ReportRSFatalErrorResponse}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor;
    }

    /** Wires the reflective field accessors to this class and its Builder. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class);
    }

    // Shared parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ReportRSFatalErrorResponse> PARSER =
        new com.google.protobuf.AbstractParser<ReportRSFatalErrorResponse>() {
      public ReportRSFatalErrorResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReportRSFatalErrorResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReportRSFatalErrorResponse> getParserForType() {
      return PARSER;
    }
3980
    /** No fields to reset; present for structural consistency. */
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /** Always true once computed: the message declares no required fields. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes only the unknown fields (no declared fields exist). */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /** Computes (and caches) the serialized size: just the unknown fields. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    /** Java-serialization hook; delegates to GeneratedMessage's proxy form. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
4015
    /** Value equality: with no declared fields, only unknown fields compare. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /** Hash over the descriptor and unknown fields; memoized. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4044
    // Static parsing entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a length prefix first, for streams of messages.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
4097
    /** Creates a new empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Creates a builder pre-populated with this message's fields. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
4111
4112
4113
4114 public static final class Builder extends
4115 com.google.protobuf.GeneratedMessage.Builder<Builder>
4116 implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponseOrBuilder {
      /** Returns the descriptor for the message type this builder produces. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class);
      }

      // Construct using ReportRSFatalErrorResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested message fields, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
4146
4147 public Builder clear() {
4148 super.clear();
4149 return this;
4150 }
4151
4152 public Builder clone() {
4153 return create().mergeFrom(buildPartial());
4154 }
4155
4156 public com.google.protobuf.Descriptors.Descriptor
4157 getDescriptorForType() {
4158 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor;
4159 }
4160
4161 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse getDefaultInstanceForType() {
4162 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance();
4163 }
4164
4165 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse build() {
4166 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial();
4167 if (!result.isInitialized()) {
4168 throw newUninitializedMessageException(result);
4169 }
4170 return result;
4171 }
4172
4173 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildPartial() {
4174 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse(this);
4175 onBuilt();
4176 return result;
4177 }
4178
4179 public Builder mergeFrom(com.google.protobuf.Message other) {
4180 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) {
4181 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)other);
4182 } else {
4183 super.mergeFrom(other);
4184 return this;
4185 }
4186 }
4187
4188 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other) {
4189 if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()) return this;
4190 this.mergeUnknownFields(other.getUnknownFields());
4191 return this;
4192 }
4193
4194 public final boolean isInitialized() {
4195 return true;
4196 }
4197
4198 public Builder mergeFrom(
4199 com.google.protobuf.CodedInputStream input,
4200 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4201 throws java.io.IOException {
4202 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parsedMessage = null;
4203 try {
4204 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4205 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4206 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) e.getUnfinishedMessage();
4207 throw e;
4208 } finally {
4209 if (parsedMessage != null) {
4210 mergeFrom(parsedMessage);
4211 }
4212 }
4213 return this;
4214 }
4215
4216
4217 }
4218
    static {
      // Eagerly create the shared default instance and initialize its fields.
      defaultInstance = new ReportRSFatalErrorResponse(true);
      defaultInstance.initFields();
    }
4223
4224
4225 }
4226
  /**
   * Accessor interface for {@code hbase.pb.GetLastFlushedSequenceIdRequest},
   * implemented by both the generated message class and its Builder.
   */
  public interface GetLastFlushedSequenceIdRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    /**
     * <code>required bytes region_name = 1;</code>
     *
     * @return true if region_name has been set.
     */
    boolean hasRegionName();

    /**
     * <code>required bytes region_name = 1;</code>
     *
     * @return the encoded region name (empty ByteString if unset).
     */
    com.google.protobuf.ByteString getRegionName();
  }
4248
4249
4250
4251 public static final class GetLastFlushedSequenceIdRequest extends
4252 com.google.protobuf.GeneratedMessage
4253 implements GetLastFlushedSequenceIdRequestOrBuilder {
4254
4255 private GetLastFlushedSequenceIdRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4256 super(builder);
4257 this.unknownFields = builder.getUnknownFields();
4258 }
4259 private GetLastFlushedSequenceIdRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4260
4261 private static final GetLastFlushedSequenceIdRequest defaultInstance;
4262 public static GetLastFlushedSequenceIdRequest getDefaultInstance() {
4263 return defaultInstance;
4264 }
4265
4266 public GetLastFlushedSequenceIdRequest getDefaultInstanceForType() {
4267 return defaultInstance;
4268 }
4269
4270 private final com.google.protobuf.UnknownFieldSet unknownFields;
4271 @java.lang.Override
4272 public final com.google.protobuf.UnknownFieldSet
4273 getUnknownFields() {
4274 return this.unknownFields;
4275 }
4276 private GetLastFlushedSequenceIdRequest(
4277 com.google.protobuf.CodedInputStream input,
4278 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4279 throws com.google.protobuf.InvalidProtocolBufferException {
4280 initFields();
4281 int mutable_bitField0_ = 0;
4282 com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4283 com.google.protobuf.UnknownFieldSet.newBuilder();
4284 try {
4285 boolean done = false;
4286 while (!done) {
4287 int tag = input.readTag();
4288 switch (tag) {
4289 case 0:
4290 done = true;
4291 break;
4292 default: {
4293 if (!parseUnknownField(input, unknownFields,
4294 extensionRegistry, tag)) {
4295 done = true;
4296 }
4297 break;
4298 }
4299 case 10: {
4300 bitField0_ |= 0x00000001;
4301 regionName_ = input.readBytes();
4302 break;
4303 }
4304 }
4305 }
4306 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4307 throw e.setUnfinishedMessage(this);
4308 } catch (java.io.IOException e) {
4309 throw new com.google.protobuf.InvalidProtocolBufferException(
4310 e.getMessage()).setUnfinishedMessage(this);
4311 } finally {
4312 this.unknownFields = unknownFields.build();
4313 makeExtensionsImmutable();
4314 }
4315 }
4316 public static final com.google.protobuf.Descriptors.Descriptor
4317 getDescriptor() {
4318 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor;
4319 }
4320
4321 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4322 internalGetFieldAccessorTable() {
4323 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable
4324 .ensureFieldAccessorsInitialized(
4325 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class);
4326 }
4327
4328 public static com.google.protobuf.Parser<GetLastFlushedSequenceIdRequest> PARSER =
4329 new com.google.protobuf.AbstractParser<GetLastFlushedSequenceIdRequest>() {
4330 public GetLastFlushedSequenceIdRequest parsePartialFrom(
4331 com.google.protobuf.CodedInputStream input,
4332 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4333 throws com.google.protobuf.InvalidProtocolBufferException {
4334 return new GetLastFlushedSequenceIdRequest(input, extensionRegistry);
4335 }
4336 };
4337
4338 @java.lang.Override
4339 public com.google.protobuf.Parser<GetLastFlushedSequenceIdRequest> getParserForType() {
4340 return PARSER;
4341 }
4342
4343 private int bitField0_;
4344
4345 public static final int REGION_NAME_FIELD_NUMBER = 1;
4346 private com.google.protobuf.ByteString regionName_;
4347
4348
4349
4350
4351
4352
4353
4354 public boolean hasRegionName() {
4355 return ((bitField0_ & 0x00000001) == 0x00000001);
4356 }
4357
4358
4359
4360
4361
4362
4363
4364 public com.google.protobuf.ByteString getRegionName() {
4365 return regionName_;
4366 }
4367
4368 private void initFields() {
4369 regionName_ = com.google.protobuf.ByteString.EMPTY;
4370 }
4371 private byte memoizedIsInitialized = -1;
4372 public final boolean isInitialized() {
4373 byte isInitialized = memoizedIsInitialized;
4374 if (isInitialized != -1) return isInitialized == 1;
4375
4376 if (!hasRegionName()) {
4377 memoizedIsInitialized = 0;
4378 return false;
4379 }
4380 memoizedIsInitialized = 1;
4381 return true;
4382 }
4383
4384 public void writeTo(com.google.protobuf.CodedOutputStream output)
4385 throws java.io.IOException {
4386 getSerializedSize();
4387 if (((bitField0_ & 0x00000001) == 0x00000001)) {
4388 output.writeBytes(1, regionName_);
4389 }
4390 getUnknownFields().writeTo(output);
4391 }
4392
4393 private int memoizedSerializedSize = -1;
4394 public int getSerializedSize() {
4395 int size = memoizedSerializedSize;
4396 if (size != -1) return size;
4397
4398 size = 0;
4399 if (((bitField0_ & 0x00000001) == 0x00000001)) {
4400 size += com.google.protobuf.CodedOutputStream
4401 .computeBytesSize(1, regionName_);
4402 }
4403 size += getUnknownFields().getSerializedSize();
4404 memoizedSerializedSize = size;
4405 return size;
4406 }
4407
4408 private static final long serialVersionUID = 0L;
4409 @java.lang.Override
4410 protected java.lang.Object writeReplace()
4411 throws java.io.ObjectStreamException {
4412 return super.writeReplace();
4413 }
4414
4415 @java.lang.Override
4416 public boolean equals(final java.lang.Object obj) {
4417 if (obj == this) {
4418 return true;
4419 }
4420 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)) {
4421 return super.equals(obj);
4422 }
4423 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) obj;
4424
4425 boolean result = true;
4426 result = result && (hasRegionName() == other.hasRegionName());
4427 if (hasRegionName()) {
4428 result = result && getRegionName()
4429 .equals(other.getRegionName());
4430 }
4431 result = result &&
4432 getUnknownFields().equals(other.getUnknownFields());
4433 return result;
4434 }
4435
4436 private int memoizedHashCode = 0;
4437 @java.lang.Override
4438 public int hashCode() {
4439 if (memoizedHashCode != 0) {
4440 return memoizedHashCode;
4441 }
4442 int hash = 41;
4443 hash = (19 * hash) + getDescriptorForType().hashCode();
4444 if (hasRegionName()) {
4445 hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
4446 hash = (53 * hash) + getRegionName().hashCode();
4447 }
4448 hash = (29 * hash) + getUnknownFields().hashCode();
4449 memoizedHashCode = hash;
4450 return hash;
4451 }
4452
4453 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(
4454 com.google.protobuf.ByteString data)
4455 throws com.google.protobuf.InvalidProtocolBufferException {
4456 return PARSER.parseFrom(data);
4457 }
4458 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(
4459 com.google.protobuf.ByteString data,
4460 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4461 throws com.google.protobuf.InvalidProtocolBufferException {
4462 return PARSER.parseFrom(data, extensionRegistry);
4463 }
4464 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(byte[] data)
4465 throws com.google.protobuf.InvalidProtocolBufferException {
4466 return PARSER.parseFrom(data);
4467 }
4468 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(
4469 byte[] data,
4470 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4471 throws com.google.protobuf.InvalidProtocolBufferException {
4472 return PARSER.parseFrom(data, extensionRegistry);
4473 }
4474 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(java.io.InputStream input)
4475 throws java.io.IOException {
4476 return PARSER.parseFrom(input);
4477 }
4478 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(
4479 java.io.InputStream input,
4480 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4481 throws java.io.IOException {
4482 return PARSER.parseFrom(input, extensionRegistry);
4483 }
4484 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom(java.io.InputStream input)
4485 throws java.io.IOException {
4486 return PARSER.parseDelimitedFrom(input);
4487 }
4488 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom(
4489 java.io.InputStream input,
4490 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4491 throws java.io.IOException {
4492 return PARSER.parseDelimitedFrom(input, extensionRegistry);
4493 }
4494 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(
4495 com.google.protobuf.CodedInputStream input)
4496 throws java.io.IOException {
4497 return PARSER.parseFrom(input);
4498 }
4499 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(
4500 com.google.protobuf.CodedInputStream input,
4501 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4502 throws java.io.IOException {
4503 return PARSER.parseFrom(input, extensionRegistry);
4504 }
4505
4506 public static Builder newBuilder() { return Builder.create(); }
4507 public Builder newBuilderForType() { return newBuilder(); }
4508 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest prototype) {
4509 return newBuilder().mergeFrom(prototype);
4510 }
4511 public Builder toBuilder() { return newBuilder(this); }
4512
4513 @java.lang.Override
4514 protected Builder newBuilderForType(
4515 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4516 Builder builder = new Builder(parent);
4517 return builder;
4518 }
4519
4520
4521
4522 public static final class Builder extends
4523 com.google.protobuf.GeneratedMessage.Builder<Builder>
4524 implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequestOrBuilder {
4525 public static final com.google.protobuf.Descriptors.Descriptor
4526 getDescriptor() {
4527 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor;
4528 }
4529
4530 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4531 internalGetFieldAccessorTable() {
4532 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable
4533 .ensureFieldAccessorsInitialized(
4534 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class);
4535 }
4536
4537
4538 private Builder() {
4539 maybeForceBuilderInitialization();
4540 }
4541
4542 private Builder(
4543 com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4544 super(parent);
4545 maybeForceBuilderInitialization();
4546 }
4547 private void maybeForceBuilderInitialization() {
4548 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4549 }
4550 }
4551 private static Builder create() {
4552 return new Builder();
4553 }
4554
4555 public Builder clear() {
4556 super.clear();
4557 regionName_ = com.google.protobuf.ByteString.EMPTY;
4558 bitField0_ = (bitField0_ & ~0x00000001);
4559 return this;
4560 }
4561
4562 public Builder clone() {
4563 return create().mergeFrom(buildPartial());
4564 }
4565
4566 public com.google.protobuf.Descriptors.Descriptor
4567 getDescriptorForType() {
4568 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor;
4569 }
4570
4571 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest getDefaultInstanceForType() {
4572 return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance();
4573 }
4574
4575 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest build() {
4576 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial();
4577 if (!result.isInitialized()) {
4578 throw newUninitializedMessageException(result);
4579 }
4580 return result;
4581 }
4582
4583 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildPartial() {
4584 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest(this);
4585 int from_bitField0_ = bitField0_;
4586 int to_bitField0_ = 0;
4587 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4588 to_bitField0_ |= 0x00000001;
4589 }
4590 result.regionName_ = regionName_;
4591 result.bitField0_ = to_bitField0_;
4592 onBuilt();
4593 return result;
4594 }
4595
4596 public Builder mergeFrom(com.google.protobuf.Message other) {
4597 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) {
4598 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)other);
4599 } else {
4600 super.mergeFrom(other);
4601 return this;
4602 }
4603 }
4604
4605 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other) {
4606 if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance()) return this;
4607 if (other.hasRegionName()) {
4608 setRegionName(other.getRegionName());
4609 }
4610 this.mergeUnknownFields(other.getUnknownFields());
4611 return this;
4612 }
4613
4614 public final boolean isInitialized() {
4615 if (!hasRegionName()) {
4616
4617 return false;
4618 }
4619 return true;
4620 }
4621
4622 public Builder mergeFrom(
4623 com.google.protobuf.CodedInputStream input,
4624 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4625 throws java.io.IOException {
4626 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parsedMessage = null;
4627 try {
4628 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4629 } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4630 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) e.getUnfinishedMessage();
4631 throw e;
4632 } finally {
4633 if (parsedMessage != null) {
4634 mergeFrom(parsedMessage);
4635 }
4636 }
4637 return this;
4638 }
4639 private int bitField0_;
4640
4641
4642 private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
4643
4644
4645
4646
4647
4648
4649
4650 public boolean hasRegionName() {
4651 return ((bitField0_ & 0x00000001) == 0x00000001);
4652 }
4653
4654
4655
4656
4657
4658
4659
4660 public com.google.protobuf.ByteString getRegionName() {
4661 return regionName_;
4662 }
4663
4664
4665
4666
4667
4668
4669
4670 public Builder setRegionName(com.google.protobuf.ByteString value) {
4671 if (value == null) {
4672 throw new NullPointerException();
4673 }
4674 bitField0_ |= 0x00000001;
4675 regionName_ = value;
4676 onChanged();
4677 return this;
4678 }
4679
4680
4681
4682
4683
4684
4685
4686 public Builder clearRegionName() {
4687 bitField0_ = (bitField0_ & ~0x00000001);
4688 regionName_ = getDefaultInstance().getRegionName();
4689 onChanged();
4690 return this;
4691 }
4692
4693
4694 }
4695
4696 static {
4697 defaultInstance = new GetLastFlushedSequenceIdRequest(true);
4698 defaultInstance.initFields();
4699 }
4700
4701
4702 }
4703
  /**
   * Accessor interface for {@code hbase.pb.GetLastFlushedSequenceIdResponse},
   * implemented by both the generated message class and its Builder.
   */
  public interface GetLastFlushedSequenceIdResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    /**
     * <code>required uint64 last_flushed_sequence_id = 1;</code>
     *
     * @return true if last_flushed_sequence_id has been set.
     */
    boolean hasLastFlushedSequenceId();

    /**
     * <code>required uint64 last_flushed_sequence_id = 1;</code>
     */
    long getLastFlushedSequenceId();

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId>
        getStoreLastFlushedSequenceIdList();

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreLastFlushedSequenceId(int index);

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    int getStoreLastFlushedSequenceIdCount();

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>
        getStoreLastFlushedSequenceIdOrBuilderList();

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreLastFlushedSequenceIdOrBuilder(
        int index);
  }
4770
4771
4772
4773 public static final class GetLastFlushedSequenceIdResponse extends
4774 com.google.protobuf.GeneratedMessage
4775 implements GetLastFlushedSequenceIdResponseOrBuilder {
4776
    // Construct via Builder (see newBuilder()).
    private GetLastFlushedSequenceIdResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the eager default instance.
    private GetLastFlushedSequenceIdResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetLastFlushedSequenceIdResponse defaultInstance;
    /** @return the shared immutable default (empty) instance. */
    public static GetLastFlushedSequenceIdResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetLastFlushedSequenceIdResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor used by PARSER.
     * Field 1 (tag 8, varint): last_flushed_sequence_id.
     * Field 2 (tag 18, length-delimited): repeated store_last_flushed_sequence_id,
     * accumulated in a mutable list and made unmodifiable in the finally block.
     */
    private GetLastFlushedSequenceIdResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              lastFlushedSequenceId_ = input.readUInt64();
              break;
            }
            case 18: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                // Lazily allocate the list on first element.
                storeLastFlushedSequenceId_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId>();
                mutable_bitField0_ |= 0x00000002;
              }
              storeLastFlushedSequenceId_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          storeLastFlushedSequenceId_ = java.util.Collections.unmodifiableList(storeLastFlushedSequenceId_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** @return the descriptor for this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor;
    }

    /** Supplies the reflection table that backs generated accessors. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class);
    }
4860
4861 public static com.google.protobuf.Parser<GetLastFlushedSequenceIdResponse> PARSER =
4862 new com.google.protobuf.AbstractParser<GetLastFlushedSequenceIdResponse>() {
4863 public GetLastFlushedSequenceIdResponse parsePartialFrom(
4864 com.google.protobuf.CodedInputStream input,
4865 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4866 throws com.google.protobuf.InvalidProtocolBufferException {
4867 return new GetLastFlushedSequenceIdResponse(input, extensionRegistry);
4868 }
4869 };
4870
4871 @java.lang.Override
4872 public com.google.protobuf.Parser<GetLastFlushedSequenceIdResponse> getParserForType() {
4873 return PARSER;
4874 }
4875
    private int bitField0_;

    public static final int LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER = 1;
    private long lastFlushedSequenceId_;

    /**
     * <code>required uint64 last_flushed_sequence_id = 1;</code>
     *
     * @return true if last_flushed_sequence_id has been set.
     */
    public boolean hasLastFlushedSequenceId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /**
     * <code>required uint64 last_flushed_sequence_id = 1;</code>
     */
    public long getLastFlushedSequenceId() {
      return lastFlushedSequenceId_;
    }

    public static final int STORE_LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId> storeLastFlushedSequenceId_;

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId> getStoreLastFlushedSequenceIdList() {
      return storeLastFlushedSequenceId_;
    }

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>
        getStoreLastFlushedSequenceIdOrBuilderList() {
      return storeLastFlushedSequenceId_;
    }

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    public int getStoreLastFlushedSequenceIdCount() {
      return storeLastFlushedSequenceId_.size();
    }

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreLastFlushedSequenceId(int index) {
      return storeLastFlushedSequenceId_.get(index);
    }

    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreLastFlushedSequenceIdOrBuilder(
        int index) {
      return storeLastFlushedSequenceId_.get(index);
    }

    private void initFields() {
      lastFlushedSequenceId_ = 0L;
      storeLastFlushedSequenceId_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // last_flushed_sequence_id is required.
      if (!hasLastFlushedSequenceId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // Repeated sub-messages must themselves be initialized.
      for (int i = 0; i < getStoreLastFlushedSequenceIdCount(); i++) {
        if (!getStoreLastFlushedSequenceId(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, lastFlushedSequenceId_);
      }
      for (int i = 0; i < storeLastFlushedSequenceId_.size(); i++) {
        output.writeMessage(2, storeLastFlushedSequenceId_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, lastFlushedSequenceId_);
      }
      for (int i = 0; i < storeLastFlushedSequenceId_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, storeLastFlushedSequenceId_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) obj;

      boolean result = true;
      result = result && (hasLastFlushedSequenceId() == other.hasLastFlushedSequenceId());
      if (hasLastFlushedSequenceId()) {
        result = result && (getLastFlushedSequenceId()
            == other.getLastFlushedSequenceId());
      }
      result = result && getStoreLastFlushedSequenceIdList()
          .equals(other.getStoreLastFlushedSequenceIdList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLastFlushedSequenceId()) {
        hash = (37 * hash) + LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLastFlushedSequenceId());
      }
      if (getStoreLastFlushedSequenceIdCount() > 0) {
        hash = (37 * hash) + STORE_LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getStoreLastFlushedSequenceIdList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- Static parse helpers; all delegate to PARSER. ----
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
5097 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom(
5098 java.io.InputStream input,
5099 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5100 throws java.io.IOException {
5101 return PARSER.parseDelimitedFrom(input, extensionRegistry);
5102 }
5103 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(
5104 com.google.protobuf.CodedInputStream input)
5105 throws java.io.IOException {
5106 return PARSER.parseFrom(input);
5107 }
5108 public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(
5109 com.google.protobuf.CodedInputStream input,
5110 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5111 throws java.io.IOException {
5112 return PARSER.parseFrom(input, extensionRegistry);
5113 }
5114
    // Builder factory methods (generated).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder pre-populated with the prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent, so nested-builder changes
    // propagate invalidation upward (framework plumbing).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
5128
5129
5130
    /**
     * Generated builder for {@code GetLastFlushedSequenceIdResponse}.
     *
     * <p>Bookkeeping conventions (standard protoc output):
     * bit {@code 0x00000001} of {@code bitField0_} tracks presence of the
     * optional {@code lastFlushedSequenceId_}; bit {@code 0x00000002} tracks
     * whether the repeated {@code storeLastFlushedSequenceId_} list is a
     * privately-owned mutable copy. The repeated field is held either inline
     * in {@code storeLastFlushedSequenceId_} or, once sub-builders are
     * requested, in {@code storeLastFlushedSequenceIdBuilder_} — exactly one
     * of the two representations is active at any time, and every accessor
     * below branches on which one it is.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class);
      }

      // Construct using GetLastFlushedSequenceIdResponse.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime is configured
      // to always use field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStoreLastFlushedSequenceIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets both fields to their defaults and clears the presence bits. */
      public Builder clear() {
        super.clear();
        lastFlushedSequenceId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          storeLastFlushedSequenceId_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          storeLastFlushedSequenceIdBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException if a
       * required field is missing (see isInitialized()).
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the required-field check. The repeated list is frozen
       * (wrapped unmodifiable) and handed to the message; the builder drops
       * its mutability bit so a later mutation forces a fresh copy.
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.lastFlushedSequenceId_ = lastFlushedSequenceId_;
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            storeLastFlushedSequenceId_ = java.util.Collections.unmodifiableList(storeLastFlushedSequenceId_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.storeLastFlushedSequenceId_ = storeLastFlushedSequenceId_;
        } else {
          result.storeLastFlushedSequenceId_ = storeLastFlushedSequenceIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Proto merge semantics: a set scalar in {@code other} overwrites ours;
       * repeated elements are appended. When our list is still empty we adopt
       * {@code other}'s (immutable) list by reference instead of copying, and
       * clear the mutability bit accordingly.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()) return this;
        if (other.hasLastFlushedSequenceId()) {
          setLastFlushedSequenceId(other.getLastFlushedSequenceId());
        }
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          if (!other.storeLastFlushedSequenceId_.isEmpty()) {
            if (storeLastFlushedSequenceId_.isEmpty()) {
              storeLastFlushedSequenceId_ = other.storeLastFlushedSequenceId_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureStoreLastFlushedSequenceIdIsMutable();
              storeLastFlushedSequenceId_.addAll(other.storeLastFlushedSequenceId_);
            }
            onChanged();
          }
        } else {
          if (!other.storeLastFlushedSequenceId_.isEmpty()) {
            if (storeLastFlushedSequenceIdBuilder_.isEmpty()) {
              // Empty nested builder: cheaper to drop it, adopt other's list,
              // and lazily recreate the builder if the runtime demands it.
              storeLastFlushedSequenceIdBuilder_.dispose();
              storeLastFlushedSequenceIdBuilder_ = null;
              storeLastFlushedSequenceId_ = other.storeLastFlushedSequenceId_;
              bitField0_ = (bitField0_ & ~0x00000002);
              storeLastFlushedSequenceIdBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getStoreLastFlushedSequenceIdFieldBuilder() : null;
            } else {
              storeLastFlushedSequenceIdBuilder_.addAllMessages(other.storeLastFlushedSequenceId_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /**
       * True iff the required lastFlushedSequenceId is set and every
       * repeated StoreSequenceId element is itself initialized.
       */
      public final boolean isInitialized() {
        if (!hasLastFlushedSequenceId()) {
          // required uint64 last_flushed_sequence_id missing
          return false;
        }
        for (int i = 0; i < getStoreLastFlushedSequenceIdCount(); i++) {
          if (!getStoreLastFlushedSequenceId(i).isInitialized()) {
            // a nested StoreSequenceId is missing one of its required fields
            return false;
          }
        }
        return true;
      }

      /**
       * Stream merge: parses a full message then merges it in. On a parse
       * error, whatever was successfully parsed (the "unfinished message")
       * is still merged in the finally block before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required uint64 last_flushed_sequence_id = 1 (field number per
      // LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER on the message class).
      private long lastFlushedSequenceId_ ;

      public boolean hasLastFlushedSequenceId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      public long getLastFlushedSequenceId() {
        return lastFlushedSequenceId_;
      }

      public Builder setLastFlushedSequenceId(long value) {
        bitField0_ |= 0x00000001;
        lastFlushedSequenceId_ = value;
        onChanged();
        return this;
      }

      public Builder clearLastFlushedSequenceId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lastFlushedSequenceId_ = 0L;
        onChanged();
        return this;
      }

      // repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId> storeLastFlushedSequenceId_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replace the (possibly shared/immutable) list with a
      // private ArrayList before the first in-place mutation.
      private void ensureStoreLastFlushedSequenceIdIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          storeLastFlushedSequenceId_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId>(storeLastFlushedSequenceId_);
          bitField0_ |= 0x00000002;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> storeLastFlushedSequenceIdBuilder_;

      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId> getStoreLastFlushedSequenceIdList() {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          return java.util.Collections.unmodifiableList(storeLastFlushedSequenceId_);
        } else {
          return storeLastFlushedSequenceIdBuilder_.getMessageList();
        }
      }

      public int getStoreLastFlushedSequenceIdCount() {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          return storeLastFlushedSequenceId_.size();
        } else {
          return storeLastFlushedSequenceIdBuilder_.getCount();
        }
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreLastFlushedSequenceId(int index) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          return storeLastFlushedSequenceId_.get(index);
        } else {
          return storeLastFlushedSequenceIdBuilder_.getMessage(index);
        }
      }

      public Builder setStoreLastFlushedSequenceId(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.set(index, value);
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.setMessage(index, value);
        }
        return this;
      }

      public Builder setStoreLastFlushedSequenceId(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.set(index, builderForValue.build());
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }

      public Builder addStoreLastFlushedSequenceId(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.add(value);
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.addMessage(value);
        }
        return this;
      }

      public Builder addStoreLastFlushedSequenceId(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.add(index, value);
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.addMessage(index, value);
        }
        return this;
      }

      public Builder addStoreLastFlushedSequenceId(
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.add(builderForValue.build());
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }

      public Builder addStoreLastFlushedSequenceId(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.add(index, builderForValue.build());
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }

      public Builder addAllStoreLastFlushedSequenceId(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId> values) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          ensureStoreLastFlushedSequenceIdIsMutable();
          // GeneratedMessage.Builder helper: bulk-add with null checks.
          super.addAll(values, storeLastFlushedSequenceId_);
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.addAllMessages(values);
        }
        return this;
      }

      public Builder clearStoreLastFlushedSequenceId() {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          storeLastFlushedSequenceId_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.clear();
        }
        return this;
      }

      public Builder removeStoreLastFlushedSequenceId(int index) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          ensureStoreLastFlushedSequenceIdIsMutable();
          storeLastFlushedSequenceId_.remove(index);
          onChanged();
        } else {
          storeLastFlushedSequenceIdBuilder_.remove(index);
        }
        return this;
      }

      // Nested-builder accessors: requesting any of these switches the field
      // into builder-managed mode via getStoreLastFlushedSequenceIdFieldBuilder().
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder getStoreLastFlushedSequenceIdBuilder(
          int index) {
        return getStoreLastFlushedSequenceIdFieldBuilder().getBuilder(index);
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreLastFlushedSequenceIdOrBuilder(
          int index) {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          return storeLastFlushedSequenceId_.get(index);  } else {
          return storeLastFlushedSequenceIdBuilder_.getMessageOrBuilder(index);
        }
      }

      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>
           getStoreLastFlushedSequenceIdOrBuilderList() {
        if (storeLastFlushedSequenceIdBuilder_ != null) {
          return storeLastFlushedSequenceIdBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(storeLastFlushedSequenceId_);
        }
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder() {
        return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder(
          int index) {
        return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
      }

      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder>
           getStoreLastFlushedSequenceIdBuilderList() {
        return getStoreLastFlushedSequenceIdFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder, transferring ownership of
      // the inline list to it (the inline reference is nulled afterwards).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>
          getStoreLastFlushedSequenceIdFieldBuilder() {
        if (storeLastFlushedSequenceIdBuilder_ == null) {
          storeLastFlushedSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>(
                  storeLastFlushedSequenceId_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          storeLastFlushedSequenceId_ = null;
        }
        return storeLastFlushedSequenceIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetLastFlushedSequenceIdResponse)
    }
5661
    // Eagerly build the immutable default (empty) instance used by
    // getDefaultInstance()/getDefaultInstanceForType().
    static {
      defaultInstance = new GetLastFlushedSequenceIdResponse(true);
      defaultInstance.initFields();
    }
5666
5667
5668 }
5669
  /**
   * Read-only accessor interface for {@code hbase.pb.RegionStateTransition},
   * implemented by both the message and its Builder.
   *
   * <p>Field numbers (from the message class): transition_code = 1,
   * region_info = 2, open_seq_num = 3. transition_code is required
   * (per the message's isInitialized()); open_seq_num is optional.
   */
  public interface RegionStateTransitionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1;
    boolean hasTransitionCode();

    org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getTransitionCode();

    // repeated .hbase.pb.RegionInfo region_info = 2;
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> 
        getRegionInfoList();

    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);

    int getRegionInfoCount();

    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
        getRegionInfoOrBuilderList();

    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
        int index);

    // optional uint64 open_seq_num = 3;
    boolean hasOpenSeqNum();

    long getOpenSeqNum();
  }
5746
5747
5748
5749 public static final class RegionStateTransition extends
5750 com.google.protobuf.GeneratedMessage
5751 implements RegionStateTransitionOrBuilder {
5752
    // Built from a Builder; inherits the builder's unknown fields.
    private RegionStateTransition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance (the flag value is ignored).
    private RegionStateTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5758
    // Immutable shared default instance, created in the class static block.
    private static final RegionStateTransition defaultInstance;
    public static RegionStateTransition getDefaultInstance() {
      return defaultInstance;
    }

    public RegionStateTransition getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time are preserved here verbatim so
    // re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tags until EOF (tag 0) or an
     * unparseable unknown field. Recognized tags: 8 = transition_code (enum;
     * unknown enum numbers are preserved as varint unknown fields),
     * 18 = region_info (repeated message, accumulated into a lazily created
     * ArrayList), 24 = open_seq_num (uint64).
     *
     * <p>Note: the {@code default} case appearing before {@code case 8} is
     * normal protoc output — Java switch dispatch is unaffected by case order.
     */
    private RegionStateTransition(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode value = org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.valueOf(rawValue);
              if (value == null) {
                // Enum number from a newer schema: keep it round-trippable.
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                transitionCode_ = value;
              }
              break;
            }
            case 18: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>();
                mutable_bitField0_ |= 0x00000002;
              }
              regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
              break;
            }
            case 24: {
              bitField0_ |= 0x00000002;
              openSeqNum_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze repeated fields and unknown fields even on error, so the
        // partially-parsed message attached to the exception is safe to read.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table wiring.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder.class);
    }
5847
    // Shared stateless parser; delegates to the parsing constructor.
    // (Generated as a mutable public static field — standard protobuf 2.x
    // output; do not hand-edit, regenerate from the .proto instead.)
    public static com.google.protobuf.Parser<RegionStateTransition> PARSER =
        new com.google.protobuf.AbstractParser<RegionStateTransition>() {
      public RegionStateTransition parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionStateTransition(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionStateTransition> getParserForType() {
      return PARSER;
    }
5862
5863
5864
5865
    /**
     * Protobuf enum {@code hbase.pb.RegionStateTransition.TransitionCode}.
     *
     * <p>Each constant carries an (index, value) pair: {@code index} is the
     * position in the descriptor's value list, {@code value} is the proto
     * wire number. Here they happen to coincide (0..10).
     */
    public enum TransitionCode
        implements com.google.protobuf.ProtocolMessageEnum {
      // OPENED = 0;
      OPENED(0, 0),
      // FAILED_OPEN = 1;
      FAILED_OPEN(1, 1),
      // CLOSED = 2;
      CLOSED(2, 2),
      // READY_TO_SPLIT = 3;
      READY_TO_SPLIT(3, 3),
      // READY_TO_MERGE = 4;
      READY_TO_MERGE(4, 4),
      // SPLIT_PONR = 5;
      SPLIT_PONR(5, 5),
      // MERGE_PONR = 6;
      MERGE_PONR(6, 6),
      // SPLIT = 7;
      SPLIT(7, 7),
      // MERGED = 8;
      MERGED(8, 8),
      // SPLIT_REVERTED = 9;
      SPLIT_REVERTED(9, 9),
      // MERGE_REVERTED = 10;
      MERGE_REVERTED(10, 10),
      ;

      // Wire-number constants mirroring each enum value.
      public static final int OPENED_VALUE = 0;
      public static final int FAILED_OPEN_VALUE = 1;
      public static final int CLOSED_VALUE = 2;
      public static final int READY_TO_SPLIT_VALUE = 3;
      public static final int READY_TO_MERGE_VALUE = 4;
      public static final int SPLIT_PONR_VALUE = 5;
      public static final int MERGE_PONR_VALUE = 6;
      public static final int SPLIT_VALUE = 7;
      public static final int MERGED_VALUE = 8;
      public static final int SPLIT_REVERTED_VALUE = 9;
      public static final int MERGE_REVERTED_VALUE = 10;


      public final int getNumber() { return value; }

      /** Maps a proto wire number to its constant; null if unrecognized. */
      public static TransitionCode valueOf(int value) {
        switch (value) {
          case 0: return OPENED;
          case 1: return FAILED_OPEN;
          case 2: return CLOSED;
          case 3: return READY_TO_SPLIT;
          case 4: return READY_TO_MERGE;
          case 5: return SPLIT_PONR;
          case 6: return MERGE_PONR;
          case 7: return SPLIT;
          case 8: return MERGED;
          case 9: return SPLIT_REVERTED;
          case 10: return MERGE_REVERTED;
          default: return null;
        }
      }

      public static com.google.protobuf.Internal.EnumLiteMap<TransitionCode>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<TransitionCode>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<TransitionCode>() {
              public TransitionCode findValueByNumber(int number) {
                return TransitionCode.valueOf(number);
              }
            };

      // Reflection plumbing: descriptor lookups by declaration index.
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.getDescriptor().getEnumTypes().get(0);
      }

      private static final TransitionCode[] VALUES = values();

      /** Maps an EnumValueDescriptor (from reflection) back to a constant. */
      public static TransitionCode valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int index;
      private final int value;

      private TransitionCode(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hbase.pb.RegionStateTransition.TransitionCode)
    }
6041
    // Presence bits: 0x1 = transition_code set, 0x2 = open_seq_num set
    // (repeated region_info needs no presence bit).
    private int bitField0_;
    // required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1;
    public static final int TRANSITION_CODE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode transitionCode_;

    public boolean hasTransitionCode() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    // Returns OPENED (the default from initFields) when the field is unset.
    public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getTransitionCode() {
      return transitionCode_;
    }
6058
6059
    // repeated .hbase.pb.RegionInfo region_info = 2;
    // List is immutable after construction (frozen by the parsing ctor /
    // builder), so it is safe to hand out directly.
    public static final int REGION_INFO_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;

    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
      return regionInfo_;
    }

    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
        getRegionInfoOrBuilderList() {
      return regionInfo_;
    }

    public int getRegionInfoCount() {
      return regionInfo_.size();
    }

    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
      return regionInfo_.get(index);
    }

    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
        int index) {
      return regionInfo_.get(index);
    }
6114
6115
    // optional uint64 open_seq_num = 3; presence tracked by bit 0x2.
    public static final int OPEN_SEQ_NUM_FIELD_NUMBER = 3;
    private long openSeqNum_;

    public boolean hasOpenSeqNum() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }

    // Returns 0 when unset (default from initFields).
    public long getOpenSeqNum() {
      return openSeqNum_;
    }
6138
    /**
     * Resets all fields to their proto defaults; invoked once per instance
     * before parsing or when creating the default instance.
     */
    private void initFields() {
      transitionCode_ = org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED;
      regionInfo_ = java.util.Collections.emptyList();
      openSeqNum_ = 0L;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * A message is initialized when the required {@code transition_code} is
     * present and every nested {@code region_info} message is itself
     * initialized. The result is cached in {@code memoizedIsInitialized}.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTransitionCode()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getRegionInfoCount(); i++) {
        if (!getRegionInfo(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
6162
    /**
     * Serializes set fields to the wire in field-number order.
     * Calls getSerializedSize() first so nested-message sizes are memoized
     * before writeMessage() needs them.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, transitionCode_.getNumber());
      }
      for (int i = 0; i < regionInfo_.size(); i++) {
        output.writeMessage(2, regionInfo_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(3, openSeqNum_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and caches) the exact number of bytes writeTo() will emit,
     * including unknown fields preserved from parsing.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, transitionCode_.getNumber());
      }
      for (int i = 0; i < regionInfo_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, regionInfo_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, openSeqNum_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
6200
    private static final long serialVersionUID = 0L;
    /**
     * Java serialization hook; delegates to GeneratedMessage, which
     * serializes via the protobuf wire format instead of field-by-field.
     */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Field-by-field equality: presence flags, set values, the repeated
     * region_info list, and the preserved unknown fields must all match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition) obj;

      boolean result = true;
      result = result && (hasTransitionCode() == other.hasTransitionCode());
      if (hasTransitionCode()) {
        result = result &&
            (getTransitionCode() == other.getTransitionCode());
      }
      result = result && getRegionInfoList()
          .equals(other.getRegionInfoList());
      result = result && (hasOpenSeqNum() == other.hasOpenSeqNum());
      if (hasOpenSeqNum()) {
        result = result && (getOpenSeqNum()
            == other.getOpenSeqNum());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means not yet computed (safe because the mix below
    // starting from 41 cannot produce 0 for this message shape in practice).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes descriptor, each set field keyed
     * by its field number, and the unknown-field set. Result is cached.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTransitionCode()) {
        hash = (37 * hash) + TRANSITION_CODE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getTransitionCode());
      }
      if (getRegionInfoCount() > 0) {
        hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getRegionInfoList().hashCode();
      }
      if (hasOpenSeqNum()) {
        hash = (37 * hash) + OPEN_SEQ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getOpenSeqNum());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
6260
    // Static parse entry points; all delegate to PARSER. The *parseFrom*
    // variants consume a whole input; *parseDelimitedFrom* reads a varint
    // length prefix first, allowing several messages on one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory helpers.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
6327
6328
6329
    /**
     * Builder for {@code hbase.pb.RegionStateTransition}.
     * Mutable companion of the immutable message above; {@code build()}
     * snapshots the current state into a new message instance.
     * Builder presence bits: bit 0 = transitionCode, bit 1 = the repeated
     * regionInfo list is privately mutable, bit 2 = openSeqNum. Note these
     * differ from the message's bits; buildPartial() remaps them.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder.class);
      }

      // Construct via RegionStateTransition.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it
      // (i.e. when change notifications must propagate to a parent builder).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets every field to its proto default and clears presence bits. */
      public Builder clear() {
        super.clear();
        transitionCode_ = org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (regionInfoBuilder_ == null) {
          regionInfo_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          regionInfoBuilder_.clear();
        }
        openSeqNum_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if required fields are missing.
       * @throws com.google.protobuf.UninitializedMessageException (wrapped)
       *         when transition_code is unset or a region_info is incomplete
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without checking required fields. Remaps builder presence
       * bits (0x1/0x4) to message bits (0x1/0x2) and freezes the repeated
       * regionInfo list (which is then shared with this builder until the
       * next mutation re-copies it via ensureRegionInfoIsMutable()).
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.transitionCode_ = transitionCode_;
        if (regionInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.regionInfo_ = regionInfo_;
        } else {
          result.regionInfo_ = regionInfoBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000002;
        }
        result.openSeqNum_ = openSeqNum_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges set fields from {@code other}: scalars overwrite, the
       * repeated regionInfo entries are appended, unknown fields merge.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.getDefaultInstance()) return this;
        if (other.hasTransitionCode()) {
          setTransitionCode(other.getTransitionCode());
        }
        if (regionInfoBuilder_ == null) {
          if (!other.regionInfo_.isEmpty()) {
            if (regionInfo_.isEmpty()) {
              // Our list is empty: share other's immutable list until mutated.
              regionInfo_ = other.regionInfo_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureRegionInfoIsMutable();
              regionInfo_.addAll(other.regionInfo_);
            }
            onChanged();
          }
        } else {
          if (!other.regionInfo_.isEmpty()) {
            if (regionInfoBuilder_.isEmpty()) {
              // Discard the empty field builder and adopt other's list
              // directly (re-creating the builder only if the runtime
              // forces field builders).
              regionInfoBuilder_.dispose();
              regionInfoBuilder_ = null;
              regionInfo_ = other.regionInfo_;
              bitField0_ = (bitField0_ & ~0x00000002);
              regionInfoBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getRegionInfoFieldBuilder() : null;
            } else {
              regionInfoBuilder_.addAllMessages(other.regionInfo_);
            }
          }
        }
        if (other.hasOpenSeqNum()) {
          setOpenSeqNum(other.getOpenSeqNum());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** Unmemoized variant of the message's initialization check. */
      public final boolean isInitialized() {
        if (!hasTransitionCode()) {

          return false;
        }
        for (int i = 0; i < getRegionInfoCount(); i++) {
          if (!getRegionInfo(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On parse failure
       * the partially parsed message (if any) is still merged in the
       * finally block before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder presence bits; see class comment for the layout.
      private int bitField0_;

      // required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1;
      private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode transitionCode_ = org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED;
      /**
       * Returns true if transition_code has been set on this builder.
       */
      public boolean hasTransitionCode() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * Returns the current transition_code (default OPENED).
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getTransitionCode() {
        return transitionCode_;
      }
      /**
       * Sets transition_code. Rejects null (proto2 enums are non-nullable).
       */
      public Builder setTransitionCode(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        transitionCode_ = value;
        onChanged();
        return this;
      }
      /**
       * Clears transition_code back to its default and drops its presence bit.
       */
      public Builder clearTransitionCode() {
        bitField0_ = (bitField0_ & ~0x00000001);
        transitionCode_ = org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED;
        onChanged();
        return this;
      }

      // repeated .hbase.pb.RegionInfo region_info = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
        java.util.Collections.emptyList();
      // Copy-on-write: make regionInfo_ a private mutable ArrayList before
      // the first in-place mutation (bit 0x2 records "already mutable").
      private void ensureRegionInfoIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
          bitField0_ |= 0x00000002;
         }
      }

      // Lazily created; once non-null it owns the repeated field and
      // regionInfo_ is ignored (every accessor below branches on this).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;

      /**
       * Returns an unmodifiable view of the current region_info entries.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
        if (regionInfoBuilder_ == null) {
          return java.util.Collections.unmodifiableList(regionInfo_);
        } else {
          return regionInfoBuilder_.getMessageList();
        }
      }
      /**
       * Returns the number of region_info entries.
       */
      public int getRegionInfoCount() {
        if (regionInfoBuilder_ == null) {
          return regionInfo_.size();
        } else {
          return regionInfoBuilder_.getCount();
        }
      }
      /**
       * Returns the region_info entry at {@code index}.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
        if (regionInfoBuilder_ == null) {
          return regionInfo_.get(index);
        } else {
          return regionInfoBuilder_.getMessage(index);
        }
      }
      /**
       * Replaces the entry at {@code index}; rejects null.
       */
      public Builder setRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionInfoIsMutable();
          regionInfo_.set(index, value);
          onChanged();
        } else {
          regionInfoBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * Replaces the entry at {@code index} with {@code builderForValue.build()}.
       */
      public Builder setRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
          ensureRegionInfoIsMutable();
          regionInfo_.set(index, builderForValue.build());
          onChanged();
        } else {
          regionInfoBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * Appends a region_info entry; rejects null.
       */
      public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionInfoIsMutable();
          regionInfo_.add(value);
          onChanged();
        } else {
          regionInfoBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * Inserts a region_info entry at {@code index}; rejects null.
       */
      public Builder addRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionInfoIsMutable();
          regionInfo_.add(index, value);
          onChanged();
        } else {
          regionInfoBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * Appends {@code builderForValue.build()}.
       */
      public Builder addRegionInfo(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
          ensureRegionInfoIsMutable();
          regionInfo_.add(builderForValue.build());
          onChanged();
        } else {
          regionInfoBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * Inserts {@code builderForValue.build()} at {@code index}.
       */
      public Builder addRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
          ensureRegionInfoIsMutable();
          regionInfo_.add(index, builderForValue.build());
          onChanged();
        } else {
          regionInfoBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * Appends all of {@code values} (GeneratedMessage.Builder#addAll
       * null-checks each element).
       */
      public Builder addAllRegionInfo(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> values) {
        if (regionInfoBuilder_ == null) {
          ensureRegionInfoIsMutable();
          super.addAll(values, regionInfo_);
          onChanged();
        } else {
          regionInfoBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * Removes all region_info entries.
       */
      public Builder clearRegionInfo() {
        if (regionInfoBuilder_ == null) {
          regionInfo_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          regionInfoBuilder_.clear();
        }
        return this;
      }
      /**
       * Removes the entry at {@code index}.
       */
      public Builder removeRegionInfo(int index) {
        if (regionInfoBuilder_ == null) {
          ensureRegionInfoIsMutable();
          regionInfo_.remove(index);
          onChanged();
        } else {
          regionInfoBuilder_.remove(index);
        }
        return this;
      }
      /**
       * Returns a mutable builder for the entry at {@code index}
       * (forces creation of the field builder).
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
          int index) {
        return getRegionInfoFieldBuilder().getBuilder(index);
      }
      /**
       * Returns a read-only view of the entry at {@code index}.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
          int index) {
        if (regionInfoBuilder_ == null) {
          return regionInfo_.get(index);  } else {
          return regionInfoBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * Returns read-only views of all entries.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
           getRegionInfoOrBuilderList() {
        if (regionInfoBuilder_ != null) {
          return regionInfoBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(regionInfo_);
        }
      }
      /**
       * Appends a default-valued entry and returns its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
        return getRegionInfoFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
      }
      /**
       * Inserts a default-valued entry at {@code index} and returns its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
          int index) {
        return getRegionInfoFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
      }
      /**
       * Returns mutable builders for all entries.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder>
           getRegionInfoBuilderList() {
        return getRegionInfoFieldBuilder().getBuilderList();
      }
      // Lazily converts list-backed storage to a RepeatedFieldBuilder and
      // hands ownership of regionInfo_ over to it.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
          getRegionInfoFieldBuilder() {
        if (regionInfoBuilder_ == null) {
          regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
                  regionInfo_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          regionInfo_ = null;
        }
        return regionInfoBuilder_;
      }

      // optional uint64 open_seq_num = 3;
      private long openSeqNum_ ;
      /**
       * Returns true if open_seq_num has been set on this builder.
       */
      public boolean hasOpenSeqNum() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * Returns the current open_seq_num (0 when unset).
       */
      public long getOpenSeqNum() {
        return openSeqNum_;
      }
      /**
       * Sets open_seq_num.
       */
      public Builder setOpenSeqNum(long value) {
        bitField0_ |= 0x00000004;
        openSeqNum_ = value;
        onChanged();
        return this;
      }
      /**
       * Clears open_seq_num back to 0 and drops its presence bit.
       */
      public Builder clearOpenSeqNum() {
        bitField0_ = (bitField0_ & ~0x00000004);
        openSeqNum_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.RegionStateTransition)
    }

    // Class-load-time creation of the shared default (empty) instance.
    static {
      defaultInstance = new RegionStateTransition(true);
      defaultInstance.initFields();
    }
6910
6911
6912 }
6913
  /**
   * Read-only accessor interface for {@code hbase.pb.ReportRegionStateTransitionRequest},
   * implemented by both the immutable message and its Builder.
   */
  public interface ReportRegionStateTransitionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.ServerName server = 1;
    /**
     * Returns true if the reporting region server's name was set.
     */
    boolean hasServer();
    /**
     * Returns the reporting region server's name.
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
    /**
     * Returns a read-only view of the server field.
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    // repeated .hbase.pb.RegionStateTransition transition = 2;
    /**
     * Returns the reported region state transitions.
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition>
        getTransitionList();
    /**
     * Returns the transition at {@code index}.
     */
    org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition getTransition(int index);
    /**
     * Returns the number of transitions.
     */
    int getTransitionCount();
    /**
     * Returns read-only views of all transitions.
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder>
        getTransitionOrBuilderList();
    /**
     * Returns a read-only view of the transition at {@code index}.
     */
    org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder getTransitionOrBuilder(
        int index);
  }
6968
6969
6970
6971 public static final class ReportRegionStateTransitionRequest extends
6972 com.google.protobuf.GeneratedMessage
6973 implements ReportRegionStateTransitionRequestOrBuilder {
6974
    // Used by Builder.buildPartial(); copies the builder's unknown fields.
    private ReportRegionStateTransitionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the shared default instance (no fields set).
    private ReportRegionStateTransitionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default (empty) instance, created in the static block.
    private static final ReportRegionStateTransitionRequest defaultInstance;
    public static ReportRegionStateTransitionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ReportRegionStateTransitionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this generated class does not know;
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via PARSER. Reads tags until
     * EOF (tag 0), dispatching on the full tag value
     * (field_number << 3 | wire_type): 10 = server (field 1, message),
     * 18 = transition (field 2, message). Unrecognized tags go to the
     * unknown-field set. NOTE: the {@code default:} label precedes the
     * numbered cases here; Java switch dispatch is unaffected by label order.
     */
    private ReportRegionStateTransitionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // If server was already seen, merge the new message into the
              // old one (last-wins per proto merge semantics for submessages).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = server_.toBuilder();
              }
              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(server_);
                server_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Lazily allocate the repeated list on first entry;
              // mutable_bitField0_ bit 0x2 tracks that allocation.
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                transition_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition>();
                mutable_bitField0_ |= 0x00000002;
              }
              transition_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated list and unknown fields even on error, so the
        // "unfinished message" attached to the exception is consistent.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          transition_ = java.util.Collections.unmodifiableList(transition_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for {@code hbase.pb.ReportRegionStateTransitionRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.Builder.class);
    }

    // Singleton parser delegating to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<ReportRegionStateTransitionRequest> PARSER =
        new com.google.protobuf.AbstractParser<ReportRegionStateTransitionRequest>() {
      public ReportRegionStateTransitionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReportRegionStateTransitionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReportRegionStateTransitionRequest> getParserForType() {
      return PARSER;
    }
7081
    // Presence bits: bit 0 = server (repeated transition needs no bit).
    private int bitField0_;

    // required .hbase.pb.ServerName server = 1;
    public static final int SERVER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
    /**
     * Returns true if the required {@code server} field was set.
     */
    public boolean hasServer() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * Returns the reporting region server's name.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
      return server_;
    }
    /**
     * Returns a read-only view of the {@code server} field.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
      return server_;
    }

    // repeated .hbase.pb.RegionStateTransition transition = 2;
    public static final int TRANSITION_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition> transition_;
    /**
     * Returns the (immutable) list of reported transitions.
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition> getTransitionList() {
      return transition_;
    }
    /**
     * Same entries, typed as read-only views.
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder>
        getTransitionOrBuilderList() {
      return transition_;
    }
    /**
     * Returns the number of {@code transition} entries.
     */
    public int getTransitionCount() {
      return transition_.size();
    }
    /**
     * Returns the {@code transition} entry at {@code index}.
     */
    public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition getTransition(int index) {
      return transition_.get(index);
    }
    /**
     * Returns a read-only view of the {@code transition} entry at {@code index}.
     */
    public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder getTransitionOrBuilder(
        int index) {
      return transition_.get(index);
    }
7152
    /** Resets all fields to their proto defaults. */
    private void initFields() {
      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      transition_ = java.util.Collections.emptyList();
    }
    // Memoized result: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    /**
     * Initialized when the required {@code server} is present and itself
     * initialized, and every nested {@code transition} is initialized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasServer()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getTransitionCount(); i++) {
        if (!getTransition(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
7179
    /**
     * Serializes this message to the wire format: field 1 ({@code server}) when
     * present, then each {@code transition} element as field 2, then any
     * unknown fields preserved from parsing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures memoizedSerializedSize is populated before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, server_);
      }
      for (int i = 0; i < transition_.size(); i++) {
        output.writeMessage(2, transition_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Returns (and caches) the exact serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, server_);
      }
      for (int i = 0; i < transition_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, transition_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
7210
    private static final long serialVersionUID = 0L;
    /** Java serialization hook; delegates to GeneratedMessage's serialized-proxy mechanism. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Field-by-field equality: same presence and value of {@code server},
     * equal {@code transition} lists, and equal unknown fields.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest) obj;

      boolean result = true;
      result = result && (hasServer() == other.hasServer());
      if (hasServer()) {
        result = result && getServer()
            .equals(other.getServer());
      }
      result = result && getTransitionList()
          .equals(other.getTransitionList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed (a computed hash of exactly 0 is recomputed).
    private int memoizedHashCode = 0;
    /** Hash consistent with {@link #equals}: mixes descriptor, set fields, and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasServer()) {
        hash = (37 * hash) + SERVER_FIELD_NUMBER;
        hash = (53 * hash) + getServer().hashCode();
      }
      if (getTransitionCount() > 0) {
        hash = (37 * hash) + TRANSITION_FIELD_NUMBER;
        hash = (53 * hash) + getTransitionList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7261
    // ---- Static parse entry points; all delegate to PARSER. ----

    /** Parses a message from a ByteString; throws if the bytes are malformed. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    /** Parses from a ByteString, resolving extensions against {@code extensionRegistry}. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    /** Parses a message from a byte array. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    /** Parses from a byte array with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    /** Parses one message that occupies the entire remaining stream. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    /** Stream parse with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    /** Parses one length-prefixed message (varint size then body) from the stream. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    /** Length-prefixed stream parse with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    /** Parses from an already-open CodedInputStream. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    /** CodedInputStream parse with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    /** Returns a fresh, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized with this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }

    /** Creates a child builder attached to {@code parent} for nested-builder change propagation. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
7328
7329
7330
    /**
     * Builder for {@code ReportRegionStateTransitionRequest}. Mutable companion
     * to the immutable message: holds either plain field values or, once a
     * nested builder has been requested, delegates that field to a
     * SingleFieldBuilder / RepeatedFieldBuilder. {@code bitField0_} here tracks
     * bit 0 = server set, bit 1 = transition list is a private mutable copy.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequestOrBuilder {
      /** Returns the protobuf descriptor for this message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor;
      }

      /** Hooks this type's reflection table into the GeneratedMessage machinery. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.Builder.class);
      }

      // Construct via ReportRegionStateTransitionRequest.newBuilder(), not directly.
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      /** Eagerly creates field builders when the runtime requires it (nested-builder mode). */
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getServerFieldBuilder();
          getTransitionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets both fields to defaults and clears the presence bits. */
      public Builder clear() {
        super.clear();
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (transitionBuilder_ == null) {
          transition_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          transitionBuilder_.clear();
        }
        return this;
      }

      /** Deep copy: builds a partial message and merges it into a fresh builder. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException (unchecked)
       * if required fields are missing; use {@link #buildPartial()} to skip the check.
       */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without the required-fields check, transferring field values into the message. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (serverBuilder_ == null) {
          result.server_ = server_;
        } else {
          result.server_ = serverBuilder_.build();
        }
        // Freeze the repeated field: hand the message an unmodifiable view and
        // drop this builder's ownership bit so future mutation re-copies the list.
        if (transitionBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            transition_ = java.util.Collections.unmodifiableList(transition_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.transition_ = transition_;
        } else {
          result.transition_ = transitionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      /** Typed-dispatch merge; falls back to reflective merge for other message types. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges {@code other} into this builder: server is sub-merged if present,
       * transitions are appended (or the other message's immutable list is
       * adopted directly when this builder's list is empty).
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.getDefaultInstance()) return this;
        if (other.hasServer()) {
          mergeServer(other.getServer());
        }
        if (transitionBuilder_ == null) {
          if (!other.transition_.isEmpty()) {
            if (transition_.isEmpty()) {
              transition_ = other.transition_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureTransitionIsMutable();
              transition_.addAll(other.transition_);
            }
            onChanged();
          }
        } else {
          if (!other.transition_.isEmpty()) {
            if (transitionBuilder_.isEmpty()) {
              // Adopt the other list wholesale; recreate the field builder lazily.
              transitionBuilder_.dispose();
              transitionBuilder_ = null;
              transition_ = other.transition_;
              bitField0_ = (bitField0_ & ~0x00000002);
              transitionBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getTransitionFieldBuilder() : null;
            } else {
              transitionBuilder_.addAllMessages(other.transition_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** Mirrors the message's isInitialized(): server required and recursively initialized. */
      public final boolean isInitialized() {
        if (!hasServer()) {

          return false;
        }
        if (!getServer().isInitialized()) {

          return false;
        }
        for (int i = 0; i < getTransitionCount(); i++) {
          if (!getTransition(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On parse failure the
       * partially-parsed message is still merged (finally block) before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0: server present. Bit 1: transition_ is a private mutable ArrayList.
      private int bitField0_;

      // required .hbase.pb.ServerName server = 1
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      // Non-null once nested-builder mode is active; then server_ is unused (null).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;

      /** Returns true if the {@code server} field has been set on this builder. */
      public boolean hasServer() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }

      /** Returns the current {@code server} value, from the field builder when one exists. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
        if (serverBuilder_ == null) {
          return server_;
        } else {
          return serverBuilder_.getMessage();
        }
      }

      /** Sets {@code server}; rejects null. */
      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          server_ = value;
          onChanged();
        } else {
          serverBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      /** Sets {@code server} from a sub-builder (built immediately). */
      public Builder setServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverBuilder_ == null) {
          server_ = builderForValue.build();
          onChanged();
        } else {
          serverBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      /**
       * Merges {@code value} into {@code server}: field-wise merge when already
       * set to a non-default value, plain assignment otherwise.
       */
      public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            server_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
          } else {
            server_ = value;
          }
          onChanged();
        } else {
          serverBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }

      /** Clears {@code server} back to the default instance and drops its presence bit. */
      public Builder clearServer() {
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      /** Returns a mutable sub-builder for {@code server}, marking the field set. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getServerFieldBuilder().getBuilder();
      }

      /** Read-only view of {@code server}; live sub-builder view when one exists. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
        if (serverBuilder_ != null) {
          return serverBuilder_.getMessageOrBuilder();
        } else {
          return server_;
        }
      }

      /**
       * Lazily switches {@code server} into field-builder mode; once created,
       * the plain {@code server_} slot is nulled and all access goes through it.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
          getServerFieldBuilder() {
        if (serverBuilder_ == null) {
          serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  server_,
                  getParentForChildren(),
                  isClean());
          server_ = null;
        }
        return serverBuilder_;
      }

      // repeated .hbase.pb.RegionStateTransition transition = 2
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition> transition_ =
        java.util.Collections.emptyList();
      /** Copy-on-write: replaces the shared/immutable list with a private ArrayList (bit 1). */
      private void ensureTransitionIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          transition_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition>(transition_);
          bitField0_ |= 0x00000002;
         }
      }

      // Non-null once repeated-field-builder mode is active; then transition_ is unused.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder> transitionBuilder_;

      /** Returns an unmodifiable snapshot view of the {@code transition} list. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition> getTransitionList() {
        if (transitionBuilder_ == null) {
          return java.util.Collections.unmodifiableList(transition_);
        } else {
          return transitionBuilder_.getMessageList();
        }
      }

      /** Returns the number of {@code transition} elements. */
      public int getTransitionCount() {
        if (transitionBuilder_ == null) {
          return transition_.size();
        } else {
          return transitionBuilder_.getCount();
        }
      }

      /** Returns the {@code transition} element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition getTransition(int index) {
        if (transitionBuilder_ == null) {
          return transition_.get(index);
        } else {
          return transitionBuilder_.getMessage(index);
        }
      }

      /** Replaces the element at {@code index}; rejects null. */
      public Builder setTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition value) {
        if (transitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTransitionIsMutable();
          transition_.set(index, value);
          onChanged();
        } else {
          transitionBuilder_.setMessage(index, value);
        }
        return this;
      }

      /** Replaces the element at {@code index} with the built sub-builder value. */
      public Builder setTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder builderForValue) {
        if (transitionBuilder_ == null) {
          ensureTransitionIsMutable();
          transition_.set(index, builderForValue.build());
          onChanged();
        } else {
          transitionBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }

      /** Appends one element; rejects null. */
      public Builder addTransition(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition value) {
        if (transitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTransitionIsMutable();
          transition_.add(value);
          onChanged();
        } else {
          transitionBuilder_.addMessage(value);
        }
        return this;
      }

      /** Inserts one element at {@code index}; rejects null. */
      public Builder addTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition value) {
        if (transitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTransitionIsMutable();
          transition_.add(index, value);
          onChanged();
        } else {
          transitionBuilder_.addMessage(index, value);
        }
        return this;
      }

      /** Appends the built value of {@code builderForValue}. */
      public Builder addTransition(
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder builderForValue) {
        if (transitionBuilder_ == null) {
          ensureTransitionIsMutable();
          transition_.add(builderForValue.build());
          onChanged();
        } else {
          transitionBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }

      /** Inserts the built value of {@code builderForValue} at {@code index}. */
      public Builder addTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder builderForValue) {
        if (transitionBuilder_ == null) {
          ensureTransitionIsMutable();
          transition_.add(index, builderForValue.build());
          onChanged();
        } else {
          transitionBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }

      /** Appends every element of {@code values}. */
      public Builder addAllTransition(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition> values) {
        if (transitionBuilder_ == null) {
          ensureTransitionIsMutable();
          super.addAll(values, transition_);
          onChanged();
        } else {
          transitionBuilder_.addAllMessages(values);
        }
        return this;
      }

      /** Empties the {@code transition} list and drops the mutability bit. */
      public Builder clearTransition() {
        if (transitionBuilder_ == null) {
          transition_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          transitionBuilder_.clear();
        }
        return this;
      }

      /** Removes the element at {@code index}. */
      public Builder removeTransition(int index) {
        if (transitionBuilder_ == null) {
          ensureTransitionIsMutable();
          transition_.remove(index);
          onChanged();
        } else {
          transitionBuilder_.remove(index);
        }
        return this;
      }

      /** Returns a live sub-builder for the element at {@code index} (switches to builder mode). */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder getTransitionBuilder(
          int index) {
        return getTransitionFieldBuilder().getBuilder(index);
      }

      /** Read-only view of the element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder getTransitionOrBuilder(
          int index) {
        if (transitionBuilder_ == null) {
          return transition_.get(index);  } else {
          return transitionBuilder_.getMessageOrBuilder(index);
        }
      }

      /** Read-only views of all elements. */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder>
           getTransitionOrBuilderList() {
        if (transitionBuilder_ != null) {
          return transitionBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(transition_);
        }
      }

      /** Appends a new default-valued element and returns its sub-builder. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder addTransitionBuilder() {
        return getTransitionFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.getDefaultInstance());
      }

      /** Inserts a new default-valued element at {@code index} and returns its sub-builder. */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder addTransitionBuilder(
          int index) {
        return getTransitionFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.getDefaultInstance());
      }

      /** Live sub-builders for all elements. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder>
           getTransitionBuilderList() {
        return getTransitionFieldBuilder().getBuilderList();
      }
      /**
       * Lazily switches the repeated field into RepeatedFieldBuilder mode; the
       * plain list is handed over (with its current mutability flag) and nulled.
       */
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder>
          getTransitionFieldBuilder() {
        if (transitionBuilder_ == null) {
          transitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder>(
                  transition_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          transition_ = null;
        }
        return transitionBuilder_;
      }

      // End of builder for hbase.pb.ReportRegionStateTransitionRequest.
    }
7906
    static {
      // Eagerly create the shared default instance; noInit ctor skips field setup,
      // so initFields() is called explicitly.
      defaultInstance = new ReportRegionStateTransitionRequest(true);
      defaultInstance.initFields();
    }
7911
7912
7913 }
7914
  /**
   * Read-only accessor interface shared by
   * {@code ReportRegionStateTransitionResponse} and its Builder.
   * Declares the single optional field {@code error_message = 1}.
   */
  public interface ReportRegionStateTransitionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    /** Returns true if {@code error_message} was explicitly set. */
    boolean hasErrorMessage();

    /** Returns {@code error_message} as a String (decoded from UTF-8 if needed). */
    java.lang.String getErrorMessage();

    /** Returns {@code error_message} as raw UTF-8 bytes. */
    com.google.protobuf.ByteString
        getErrorMessageBytes();
  }
7945
7946
7947
7948 public static final class ReportRegionStateTransitionResponse extends
7949 com.google.protobuf.GeneratedMessage
7950 implements ReportRegionStateTransitionResponseOrBuilder {
7951
    /** Constructs from a builder, adopting the builder's unknown-field set. */
    private ReportRegionStateTransitionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor for the shared default instance; fields set later via initFields().
    private ReportRegionStateTransitionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance, created in the class's static initializer.
    private static final ReportRegionStateTransitionResponse defaultInstance;
    public static ReportRegionStateTransitionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ReportRegionStateTransitionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this schema version doesn't know; preserved on re-serialize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
     * capturing field 1 ({@code error_message}, tag 10 = field 1 wire type 2)
     * and routing anything else into the unknown-field set.
     * Note: the {@code default:} label preceding {@code case 10:} is harmless —
     * switch cases match by value, not position.
     */
    private ReportRegionStateTransitionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              // Stored as ByteString; decoded to String lazily in getErrorMessage().
              errorMessage_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize unknown fields, even on error, so the unfinished
        // message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor;
    }

    /** Hooks this type's reflection table into the GeneratedMessage machinery. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.Builder.class);
    }

    // Stateless shared parser; each call delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ReportRegionStateTransitionResponse> PARSER =
        new com.google.protobuf.AbstractParser<ReportRegionStateTransitionResponse>() {
      public ReportRegionStateTransitionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReportRegionStateTransitionResponse(input, extensionRegistry);
      }
    };

    /** Returns the singleton {@code PARSER} used to deserialize this message type. */
    @java.lang.Override
    public com.google.protobuf.Parser<ReportRegionStateTransitionResponse> getParserForType() {
      return PARSER;
    }
8039
    // Presence bits; bit 0 tracks error_message.
    private int bitField0_;

    // optional string error_message = 1.
    public static final int ERROR_MESSAGE_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString (lazy UTF-8 decode/encode cache).
    private java.lang.Object errorMessage_;

    /** Returns true if {@code error_message} was explicitly set. */
    public boolean hasErrorMessage() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }

    /**
     * Returns {@code error_message} as a String. If currently stored as a
     * ByteString it is decoded from UTF-8, and the decoded String is cached
     * back into the field only when the bytes were valid UTF-8.
     */
    public java.lang.String getErrorMessage() {
      java.lang.Object ref = errorMessage_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          errorMessage_ = s;
        }
        return s;
      }
    }

    /**
     * Returns {@code error_message} as UTF-8 bytes. If currently stored as a
     * String it is encoded and the ByteString is cached back into the field.
     */
    public com.google.protobuf.ByteString
        getErrorMessageBytes() {
      java.lang.Object ref = errorMessage_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        errorMessage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
8095
    /** Resets fields to proto defaults; called from constructors before parsing. */
    private void initFields() {
      errorMessage_ = "";
    }
    // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /** Always true once computed — this message has no required fields. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes {@code error_message} (field 1) when present, then unknown fields. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures memoizedSerializedSize is populated before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getErrorMessageBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Returns (and caches) the exact serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getErrorMessageBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
8131
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Delegates to GeneratedMessage's serialization proxy so Java
      // serialization round-trips through the protobuf wire format.
      return super.writeReplace();
    }
8138
8139 @java.lang.Override
8140 public boolean equals(final java.lang.Object obj) {
8141 if (obj == this) {
8142 return true;
8143 }
8144 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse)) {
8145 return super.equals(obj);
8146 }
8147 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) obj;
8148
8149 boolean result = true;
8150 result = result && (hasErrorMessage() == other.hasErrorMessage());
8151 if (hasErrorMessage()) {
8152 result = result && getErrorMessage()
8153 .equals(other.getErrorMessage());
8154 }
8155 result = result &&
8156 getUnknownFields().equals(other.getUnknownFields());
8157 return result;
8158 }
8159
8160 private int memoizedHashCode = 0;
8161 @java.lang.Override
8162 public int hashCode() {
8163 if (memoizedHashCode != 0) {
8164 return memoizedHashCode;
8165 }
8166 int hash = 41;
8167 hash = (19 * hash) + getDescriptorForType().hashCode();
8168 if (hasErrorMessage()) {
8169 hash = (37 * hash) + ERROR_MESSAGE_FIELD_NUMBER;
8170 hash = (53 * hash) + getErrorMessage().hashCode();
8171 }
8172 hash = (29 * hash) + getUnknownFields().hashCode();
8173 memoizedHashCode = hash;
8174 return hash;
8175 }
8176
    // ----------------------------------------------------------------------
    // Static parse entry points. All of them delegate to the PARSER
    // singleton; the overloads differ only in input source (ByteString,
    // byte[], InputStream, CodedInputStream) and optional extension registry.
    // ----------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
8229
    // Creates an empty builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a builder pre-populated with the fields of {@code prototype}.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Creates a builder pre-populated with this instance's fields.
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parented builders propagate change notifications to the parent.
      Builder builder = new Builder(parent);
      return builder;
    }
8243
8244
8245
    /**
     * Builder for {@code hbase.pb.ReportRegionStateTransitionResponse}.
     *
     * <p>Accumulates field values plus a presence bitmap and produces
     * immutable message instances via {@link #build()}/{@link #buildPartial()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.Builder.class);
      }

      // Construct using ReportRegionStateTransitionResponse.newBuilder().
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No sub-message fields, so there are no nested builders to force.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its proto default and clears the presence bits.
      public Builder clear() {
        super.clear();
        errorMessage_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance();
      }

      // Builds the message, throwing if required fields are missing
      // (this type has none, so it never throws in practice).
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into a new message without the
      // initialization check.
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.errorMessage_ = errorMessage_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges set fields from {@code other}; merging the default instance
      // is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance()) return this;
        if (other.hasErrorMessage()) {
          bitField0_ |= 0x00000001;
          errorMessage_ = other.errorMessage_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // No required fields.
        return true;
      }

      // Parses from the stream and merges the result into this builder.
      // On a parse error, any fields decoded before the failure are still
      // merged (see the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap: bit 0 tracks whether error_message has been set.
      private int bitField0_;

      // optional string error_message = 1;
      // Holds either a String or a ByteString, like the message field.
      private java.lang.Object errorMessage_ = "";
      /**
       * {@code optional string error_message = 1;}
       */
      public boolean hasErrorMessage() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * {@code optional string error_message = 1;}
       *
       * <p>Unlike the message accessor, the builder caches the decoded
       * string unconditionally (builder contents are mutable anyway).
       */
      public java.lang.String getErrorMessage() {
        java.lang.Object ref = errorMessage_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          errorMessage_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * {@code optional string error_message = 1;}
       */
      public com.google.protobuf.ByteString
          getErrorMessageBytes() {
        java.lang.Object ref = errorMessage_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          errorMessage_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * Sets {@code error_message}.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setErrorMessage(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        errorMessage_ = value;
        onChanged();
        return this;
      }
      /**
       * Clears {@code error_message} back to its default (empty string).
       */
      public Builder clearErrorMessage() {
        bitField0_ = (bitField0_ & ~0x00000001);
        errorMessage_ = getDefaultInstance().getErrorMessage();
        onChanged();
        return this;
      }
      /**
       * Sets {@code error_message} from raw bytes without UTF-8 validation.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setErrorMessageBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        errorMessage_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.ReportRegionStateTransitionResponse)
    }
8463
    static {
      // Two-phase init: the noInit constructor avoids touching descriptors,
      // then initFields() applies proto defaults. This avoids class-init
      // ordering problems between the message and the outer descriptor class.
      defaultInstance = new ReportRegionStateTransitionResponse(true);
      defaultInstance.initFields();
    }
8468
8469
8470 }
8471
8472
8473
8474
8475 public static abstract class RegionServerStatusService
8476 implements com.google.protobuf.Service {
8477 protected RegionServerStatusService() {}
8478
    /**
     * Async callback-style view of the {@code RegionServerStatusService}
     * RPC service. Implementations deliver each response through the
     * supplied {@code done} callback.
     */
    public interface Interface {
      /**
       * {@code rpc RegionServerStartup(RegionServerStartupRequest) returns (RegionServerStartupResponse);}
       */
      public abstract void regionServerStartup(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse> done);

      /**
       * {@code rpc RegionServerReport(RegionServerReportRequest) returns (RegionServerReportResponse);}
       */
      public abstract void regionServerReport(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse> done);

      /**
       * {@code rpc ReportRSFatalError(ReportRSFatalErrorRequest) returns (ReportRSFatalErrorResponse);}
       */
      public abstract void reportRSFatalError(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse> done);

      /**
       * {@code rpc GetLastFlushedSequenceId(GetLastFlushedSequenceIdRequest) returns (GetLastFlushedSequenceIdResponse);}
       */
      public abstract void getLastFlushedSequenceId(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse> done);

      /**
       * {@code rpc ReportRegionStateTransition(ReportRegionStateTransitionRequest) returns (ReportRegionStateTransitionResponse);}
       */
      public abstract void reportRegionStateTransition(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse> done);

    }
8548
    /**
     * Wraps an {@link Interface} implementation as a protobuf
     * {@link com.google.protobuf.Service}; each service method simply
     * delegates to the corresponding method on {@code impl}.
     */
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new RegionServerStatusService() {
        @java.lang.Override
        public void regionServerStartup(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse> done) {
          impl.regionServerStartup(controller, request, done);
        }

        @java.lang.Override
        public void regionServerReport(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse> done) {
          impl.regionServerReport(controller, request, done);
        }

        @java.lang.Override
        public void reportRSFatalError(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse> done) {
          impl.reportRSFatalError(controller, request, done);
        }

        @java.lang.Override
        public void getLastFlushedSequenceId(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse> done) {
          impl.getLastFlushedSequenceId(controller, request, done);
        }

        @java.lang.Override
        public void reportRegionStateTransition(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse> done) {
          impl.reportRegionStateTransition(controller, request, done);
        }

      };
    }
8594
    /**
     * Wraps a {@code BlockingInterface} implementation as a protobuf
     * {@link com.google.protobuf.BlockingService}. Dispatch is by method
     * descriptor index, which must match the method order in the .proto
     * service definition.
     */
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          // Indices follow the declaration order in the .proto service.
          switch(method.getIndex()) {
            case 0:
              return impl.regionServerStartup(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)request);
            case 1:
              return impl.regionServerReport(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)request);
            case 2:
              return impl.reportRSFatalError(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)request);
            case 3:
              return impl.getLastFlushedSequenceId(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)request);
            case 4:
              return impl.reportRegionStateTransition(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Returns the default instance of the request type for the given
        // method, used by the RPC layer to parse incoming requests.
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance();
            case 4:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Returns the default instance of the response type for the given
        // method, used by the RPC layer to parse/serialize responses.
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance();
            case 4:
              return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
8679
8680
8681
8682
8683
8684
8685
8686
    /**
     * {@code rpc RegionServerStartup(RegionServerStartupRequest) returns (RegionServerStartupResponse);}
     */
    public abstract void regionServerStartup(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse> done);

    /**
     * {@code rpc RegionServerReport(RegionServerReportRequest) returns (RegionServerReportResponse);}
     */
    public abstract void regionServerReport(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse> done);

    /**
     * {@code rpc ReportRSFatalError(ReportRSFatalErrorRequest) returns (ReportRSFatalErrorResponse);}
     */
    public abstract void reportRSFatalError(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse> done);

    /**
     * {@code rpc GetLastFlushedSequenceId(GetLastFlushedSequenceIdRequest) returns (GetLastFlushedSequenceIdResponse);}
     */
    public abstract void getLastFlushedSequenceId(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse> done);

    /**
     * {@code rpc ReportRegionStateTransition(ReportRegionStateTransitionRequest) returns (ReportRegionStateTransitionResponse);}
     */
    public abstract void reportRegionStateTransition(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse> done);

    // Descriptor of this service; index 0 in the file's service list.
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
8756
    /**
     * Generic async dispatch: routes the request to the abstract method whose
     * descriptor index matches, narrowing the callback to the concrete
     * response type. Indices follow declaration order in the .proto service.
     */
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.regionServerStartup(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse>specializeCallback(
              done));
          return;
        case 1:
          this.regionServerReport(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse>specializeCallback(
              done));
          return;
        case 2:
          this.reportRSFatalError(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse>specializeCallback(
              done));
          return;
        case 3:
          this.getLastFlushedSequenceId(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse>specializeCallback(
              done));
          return;
        case 4:
          this.reportRegionStateTransition(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
8798
    // Returns the default instance of the request message type for the
    // given method; the RPC layer uses it to parse incoming requests.
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance();
        case 4:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    // Returns the default instance of the response message type for the
    // given method; the RPC layer uses it to parse/serialize responses.
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance();
        case 4:
          return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
8846
    // Creates an async client stub that issues calls over the given channel.
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
8851
    /**
     * Async client stub: each method forwards to
     * {@code channel.callMethod(...)} with the method descriptor at the
     * matching index and the concrete response prototype/class, adapting the
     * typed callback via {@code RpcUtil.generalizeCallback}.
     */
    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public void regionServerStartup(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()));
      }

      public void regionServerReport(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()));
      }

      public void reportRSFatalError(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()));
      }

      public void getLastFlushedSequenceId(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()));
      }

      public void reportRegionStateTransition(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance()));
      }
    }
8938
8939 public static BlockingInterface newBlockingStub(
8940 com.google.protobuf.BlockingRpcChannel channel) {
8941 return new BlockingStub(channel);
8942 }
8943
    /**
     * Synchronous (blocking) client view of the RegionServerStatus service.
     * Each method sends the request over a BlockingRpcChannel and returns the
     * typed response, wrapping any transport failure in a
     * {@link com.google.protobuf.ServiceException}.
     */
    public interface BlockingInterface {
      /** Blocking call of service method #0 (RegionServerStartup). */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking call of service method #1 (RegionServerReport). */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking call of service method #2 (ReportRSFatalError). */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking call of service method #3 (GetLastFlushedSequenceId). */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking call of service method #4 (ReportRegionStateTransition). */
      public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse reportRegionStateTransition(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest request)
          throws com.google.protobuf.ServiceException;
    }
8970
8971 private static final class BlockingStub implements BlockingInterface {
8972 private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
8973 this.channel = channel;
8974 }
8975
8976 private final com.google.protobuf.BlockingRpcChannel channel;
8977
8978 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup(
8979 com.google.protobuf.RpcController controller,
8980 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request)
8981 throws com.google.protobuf.ServiceException {
8982 return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) channel.callBlockingMethod(
8983 getDescriptor().getMethods().get(0),
8984 controller,
8985 request,
8986 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance());
8987 }
8988
8989
8990 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport(
8991 com.google.protobuf.RpcController controller,
8992 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request)
8993 throws com.google.protobuf.ServiceException {
8994 return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) channel.callBlockingMethod(
8995 getDescriptor().getMethods().get(1),
8996 controller,
8997 request,
8998 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance());
8999 }
9000
9001
9002 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError(
9003 com.google.protobuf.RpcController controller,
9004 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request)
9005 throws com.google.protobuf.ServiceException {
9006 return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) channel.callBlockingMethod(
9007 getDescriptor().getMethods().get(2),
9008 controller,
9009 request,
9010 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance());
9011 }
9012
9013
9014 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId(
9015 com.google.protobuf.RpcController controller,
9016 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request)
9017 throws com.google.protobuf.ServiceException {
9018 return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) channel.callBlockingMethod(
9019 getDescriptor().getMethods().get(3),
9020 controller,
9021 request,
9022 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance());
9023 }
9024
9025
9026 public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse reportRegionStateTransition(
9027 com.google.protobuf.RpcController controller,
9028 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest request)
9029 throws com.google.protobuf.ServiceException {
9030 return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) channel.callBlockingMethod(
9031 getDescriptor().getMethods().get(4),
9032 controller,
9033 request,
9034 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.getDefaultInstance());
9035 }
9036
9037 }
9038
9039
9040 }
9041
  // Per-message descriptor / field-accessor-table pairs. All of these are
  // populated exactly once by the static initializer at the bottom of this
  // file (via InternalDescriptorAssigner), in the same order the messages are
  // declared in RegionServerStatus.proto. The FieldAccessorTables are what the
  // generated message classes use for reflective field access.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionServerStartupRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionServerStartupResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionServerReportRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionServerReportResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionStateTransition_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable;
9097
  /**
   * Returns the file descriptor for {@code RegionServerStatus.proto}.
   * Built once in the static initializer below; never null after class load.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once, inside the static initializer's assigner callback.
  private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;
  static {
    // Serialized FileDescriptorProto for RegionServerStatus.proto, emitted by
    // protoc as escaped Latin-1 string chunks. DO NOT edit these bytes by
    // hand — regenerate the file from the .proto instead.
    java.lang.String[] descriptorData = {
      "\n\030RegionServerStatus.proto\022\010hbase.pb\032\013HB" +
      "ase.proto\032\023ClusterStatus.proto\"\205\001\n\032Regio" +
      "nServerStartupRequest\022\014\n\004port\030\001 \002(\r\022\031\n\021s" +
      "erver_start_code\030\002 \002(\004\022\033\n\023server_current" +
      "_time\030\003 \002(\004\022!\n\031use_this_hostname_instead" +
      "\030\004 \001(\t\"L\n\033RegionServerStartupResponse\022-\n" +
      "\013map_entries\030\001 \003(\0132\030.hbase.pb.NameString" +
      "Pair\"e\n\031RegionServerReportRequest\022$\n\006ser" +
      "ver\030\001 \002(\0132\024.hbase.pb.ServerName\022\"\n\004load\030" +
      "\002 \001(\0132\024.hbase.pb.ServerLoad\"\034\n\032RegionSer",
      "verReportResponse\"X\n\031ReportRSFatalErrorR" +
      "equest\022$\n\006server\030\001 \002(\0132\024.hbase.pb.Server" +
      "Name\022\025\n\rerror_message\030\002 \002(\t\"\034\n\032ReportRSF" +
      "atalErrorResponse\"6\n\037GetLastFlushedSeque" +
      "nceIdRequest\022\023\n\013region_name\030\001 \002(\014\"\207\001\n Ge" +
      "tLastFlushedSequenceIdResponse\022 \n\030last_f" +
      "lushed_sequence_id\030\001 \002(\004\022A\n\036store_last_f" +
      "lushed_sequence_id\030\002 \003(\0132\031.hbase.pb.Stor" +
      "eSequenceId\"\344\002\n\025RegionStateTransition\022G\n" +
      "\017transition_code\030\001 \002(\0162..hbase.pb.Region",
      "StateTransition.TransitionCode\022)\n\013region" +
      "_info\030\002 \003(\0132\024.hbase.pb.RegionInfo\022\024\n\014ope" +
      "n_seq_num\030\003 \001(\004\"\300\001\n\016TransitionCode\022\n\n\006OP" +
      "ENED\020\000\022\017\n\013FAILED_OPEN\020\001\022\n\n\006CLOSED\020\002\022\022\n\016R" +
      "EADY_TO_SPLIT\020\003\022\022\n\016READY_TO_MERGE\020\004\022\016\n\nS" +
      "PLIT_PONR\020\005\022\016\n\nMERGE_PONR\020\006\022\t\n\005SPLIT\020\007\022\n" +
      "\n\006MERGED\020\010\022\022\n\016SPLIT_REVERTED\020\t\022\022\n\016MERGE_" +
      "REVERTED\020\n\"\177\n\"ReportRegionStateTransitio" +
      "nRequest\022$\n\006server\030\001 \002(\0132\024.hbase.pb.Serv" +
      "erName\0223\n\ntransition\030\002 \003(\0132\037.hbase.pb.Re",
      "gionStateTransition\"<\n#ReportRegionState" +
      "TransitionResponse\022\025\n\rerror_message\030\001 \001(" +
      "\t2\260\004\n\031RegionServerStatusService\022b\n\023Regio" +
      "nServerStartup\022$.hbase.pb.RegionServerSt" +
      "artupRequest\032%.hbase.pb.RegionServerStar" +
      "tupResponse\022_\n\022RegionServerReport\022#.hbas" +
      "e.pb.RegionServerReportRequest\032$.hbase.p" +
      "b.RegionServerReportResponse\022_\n\022ReportRS" +
      "FatalError\022#.hbase.pb.ReportRSFatalError" +
      "Request\032$.hbase.pb.ReportRSFatalErrorRes",
      "ponse\022q\n\030GetLastFlushedSequenceId\022).hbas" +
      "e.pb.GetLastFlushedSequenceIdRequest\032*.h" +
      "base.pb.GetLastFlushedSequenceIdResponse" +
      "\022z\n\033ReportRegionStateTransition\022,.hbase." +
      "pb.ReportRegionStateTransitionRequest\032-." +
      "hbase.pb.ReportRegionStateTransitionResp" +
      "onseBN\n*org.apache.hadoop.hbase.protobuf" +
      ".generatedB\030RegionServerStatusProtosH\001\210\001" +
      "\001\240\001\001"
    };
    // Callback run by internalBuildGeneratedFileFrom once the file descriptor
    // is parsed: caches it in `descriptor` and wires up each message type's
    // descriptor and reflective field-accessor table. The getMessageTypes()
    // indices and field-name arrays mirror the declaration order in the .proto.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hbase_pb_RegionServerStartupRequest_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionServerStartupRequest_descriptor,
              new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", "UseThisHostnameInstead", });
          internal_static_hbase_pb_RegionServerStartupResponse_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionServerStartupResponse_descriptor,
              new java.lang.String[] { "MapEntries", });
          internal_static_hbase_pb_RegionServerReportRequest_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionServerReportRequest_descriptor,
              new java.lang.String[] { "Server", "Load", });
          internal_static_hbase_pb_RegionServerReportResponse_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionServerReportResponse_descriptor,
              new java.lang.String[] { });
          internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor,
              new java.lang.String[] { "Server", "ErrorMessage", });
          internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor,
              new java.lang.String[] { });
          internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor,
              new java.lang.String[] { "RegionName", });
          internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor,
              new java.lang.String[] { "LastFlushedSequenceId", "StoreLastFlushedSequenceId", });
          internal_static_hbase_pb_RegionStateTransition_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionStateTransition_descriptor,
              new java.lang.String[] { "TransitionCode", "RegionInfo", "OpenSeqNum", });
          internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor,
              new java.lang.String[] { "Server", "Transition", });
          internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor,
              new java.lang.String[] { "ErrorMessage", });
          // No extensions declared in this file, so no registry is returned.
          return null;
        }
      };
    // Parse the serialized descriptor. Order of the dependency array must
    // match the imports in the .proto (HBase.proto, ClusterStatus.proto).
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(),
        }, assigner);
  }
9237
9238
9239 }