View Javadoc

1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: Admin.proto
3   
4   package org.apache.hadoop.hbase.protobuf.generated;
5   
6   public final class AdminProtos {
  // Private constructor: AdminProtos is a non-instantiable holder class for
  // the generated protobuf message types.
  private AdminProtos() {}
  /**
   * Registers proto extensions with the given registry. The generated body is
   * intentionally empty: this file declares no extensions to register.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Read-only accessor contract shared by {@code GetRegionInfoRequest} and its
   * {@code Builder}: presence checks ({@code has*}) and getters for the
   * required {@code region} specifier and the optional
   * {@code compaction_state} flag.
   */
  public interface GetRegionInfoRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional bool compaction_state = 2;
    /**
     * <code>optional bool compaction_state = 2;</code>
     */
    boolean hasCompactionState();
    /**
     * <code>optional bool compaction_state = 2;</code>
     */
    boolean getCompactionState();
  }
38    /**
39     * Protobuf type {@code hbase.pb.GetRegionInfoRequest}
40     */
  /**
   * Protobuf type {@code hbase.pb.GetRegionInfoRequest}
   *
   * <p>Immutable request message carrying a required {@code RegionSpecifier}
   * (which region to look up) and an optional boolean asking that the region's
   * compaction state also be reported. Field presence is tracked in
   * {@code bitField0_}: bit 0x1 = region, bit 0x2 = compaction_state.
   * Generated by the protocol buffer compiler — do not hand-edit the logic.
   */
  public static final class GetRegionInfoRequest extends
      com.google.protobuf.GeneratedMessage
      implements GetRegionInfoRequestOrBuilder {
    // Use GetRegionInfoRequest.newBuilder() to construct.
    private GetRegionInfoRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton default instance
    // (see the static initializer at the bottom of this class).
    private GetRegionInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetRegionInfoRequest defaultInstance;
    public static GetRegionInfoRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetRegionInfoRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until end of
     * stream (tag 0), stashing unrecognized fields in {@code unknownFields}.
     */
    private GetRegionInfoRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // NOTE: the compiler emits 'default' before the field cases; Java
            // selects 'default' only when no case label matches, so this
            // ordering is legal and equivalent to placing it last.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 (region), length-delimited
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field seen before: merge the new occurrence into the old one.
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {  // field 2 (compaction_state), varint/bool
              bitField0_ |= 0x00000002;
              compactionState_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize unknown fields, even when parsing failed part-way.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<GetRegionInfoRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetRegionInfoRequest>() {
      public GetRegionInfoRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetRegionInfoRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetRegionInfoRequest> getParserForType() {
      return PARSER;
    }

    // Presence bits: 0x00000001 = region, 0x00000002 = compaction_state.
    private int bitField0_;
    // required .hbase.pb.RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // optional bool compaction_state = 2;
    public static final int COMPACTION_STATE_FIELD_NUMBER = 2;
    private boolean compactionState_;
    /**
     * <code>optional bool compaction_state = 2;</code>
     */
    public boolean hasCompactionState() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool compaction_state = 2;</code>
     */
    public boolean getCompactionState() {
      return compactionState_;
    }

    // Resets both fields to their proto defaults.
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      compactionState_ = false;
    }
    // Memoized init check: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'region' is required and must itself be fully initialized.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, compactionState_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed by getSerializedSize().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, compactionState_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasCompactionState() == other.hasCompactionState());
      if (hasCompactionState()) {
        result = result && (getCompactionState()
            == other.getCompactionState());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed" (recomputed if the true hash is 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasCompactionState()) {
        hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCompactionState());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.GetRegionInfoRequest}
     *
     * <p>Mutable builder. The nested {@code region} field is held either as a
     * plain message in {@code region_} or, after nested-builder access, via
     * the lazily created {@code regionBuilder_} (exactly one is active).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime demands it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        compactionState_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without enforcing required-field initialization; copies the
      // builder's presence bits into the message's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.compactionState_ = compactionState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasCompactionState()) {
          setCompactionState(other.getCompactionState());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Salvage whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits, same layout as the message: 0x1 = region, 0x2 = compaction_state.
      private int bitField0_;

      // required .hbase.pb.RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise
          // simply adopt the incoming value.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Lazily creates regionBuilder_ from region_, after which region_ is
       * nulled and the builder becomes the single source of truth.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // optional bool compaction_state = 2;
      private boolean compactionState_ ;
      /**
       * <code>optional bool compaction_state = 2;</code>
       */
      public boolean hasCompactionState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool compaction_state = 2;</code>
       */
      public boolean getCompactionState() {
        return compactionState_;
      }
      /**
       * <code>optional bool compaction_state = 2;</code>
       */
      public Builder setCompactionState(boolean value) {
        bitField0_ |= 0x00000002;
        compactionState_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool compaction_state = 2;</code>
       */
      public Builder clearCompactionState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        compactionState_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetRegionInfoRequest)
    }

    // Create and initialize the shared default instance via the noInit ctor.
    static {
      defaultInstance = new GetRegionInfoRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.GetRegionInfoRequest)
  }
661 
  /**
   * Read-only accessor contract shared by {@code GetRegionInfoResponse} and
   * its builder: the required {@code RegionInfo}, the optional
   * {@code CompactionState} enum, and the optional {@code isRecovering} flag.
   */
  public interface GetRegionInfoResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionInfo region_info = 1;
    /**
     * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
     */
    boolean hasRegionInfo();
    /**
     * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
    /**
     * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();

    // optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;
    /**
     * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
     */
    boolean hasCompactionState();
    /**
     * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState();

    // optional bool isRecovering = 3;
    /**
     * <code>optional bool isRecovering = 3;</code>
     */
    boolean hasIsRecovering();
    /**
     * <code>optional bool isRecovering = 3;</code>
     */
    boolean getIsRecovering();
  }
699   /**
700    * Protobuf type {@code hbase.pb.GetRegionInfoResponse}
701    */
702   public static final class GetRegionInfoResponse extends
703       com.google.protobuf.GeneratedMessage
704       implements GetRegionInfoResponseOrBuilder {
705     // Use GetRegionInfoResponse.newBuilder() to construct.
706     private GetRegionInfoResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
707       super(builder);
708       this.unknownFields = builder.getUnknownFields();
709     }
710     private GetRegionInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
711 
712     private static final GetRegionInfoResponse defaultInstance;
713     public static GetRegionInfoResponse getDefaultInstance() {
714       return defaultInstance;
715     }
716 
717     public GetRegionInfoResponse getDefaultInstanceForType() {
718       return defaultInstance;
719     }
720 
721     private final com.google.protobuf.UnknownFieldSet unknownFields;
722     @java.lang.Override
723     public final com.google.protobuf.UnknownFieldSet
724         getUnknownFields() {
725       return this.unknownFields;
726     }
727     private GetRegionInfoResponse(
728         com.google.protobuf.CodedInputStream input,
729         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
730         throws com.google.protobuf.InvalidProtocolBufferException {
731       initFields();
732       int mutable_bitField0_ = 0;
733       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
734           com.google.protobuf.UnknownFieldSet.newBuilder();
735       try {
736         boolean done = false;
737         while (!done) {
738           int tag = input.readTag();
739           switch (tag) {
740             case 0:
741               done = true;
742               break;
743             default: {
744               if (!parseUnknownField(input, unknownFields,
745                                      extensionRegistry, tag)) {
746                 done = true;
747               }
748               break;
749             }
750             case 10: {
751               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
752               if (((bitField0_ & 0x00000001) == 0x00000001)) {
753                 subBuilder = regionInfo_.toBuilder();
754               }
755               regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
756               if (subBuilder != null) {
757                 subBuilder.mergeFrom(regionInfo_);
758                 regionInfo_ = subBuilder.buildPartial();
759               }
760               bitField0_ |= 0x00000001;
761               break;
762             }
763             case 16: {
764               int rawValue = input.readEnum();
765               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue);
766               if (value == null) {
767                 unknownFields.mergeVarintField(2, rawValue);
768               } else {
769                 bitField0_ |= 0x00000002;
770                 compactionState_ = value;
771               }
772               break;
773             }
774             case 24: {
775               bitField0_ |= 0x00000004;
776               isRecovering_ = input.readBool();
777               break;
778             }
779           }
780         }
781       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
782         throw e.setUnfinishedMessage(this);
783       } catch (java.io.IOException e) {
784         throw new com.google.protobuf.InvalidProtocolBufferException(
785             e.getMessage()).setUnfinishedMessage(this);
786       } finally {
787         this.unknownFields = unknownFields.build();
788         makeExtensionsImmutable();
789       }
790     }
791     public static final com.google.protobuf.Descriptors.Descriptor
792         getDescriptor() {
793       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_descriptor;
794     }
795 
796     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
797         internalGetFieldAccessorTable() {
798       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable
799           .ensureFieldAccessorsInitialized(
800               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class);
801     }
802 
803     public static com.google.protobuf.Parser<GetRegionInfoResponse> PARSER =
804         new com.google.protobuf.AbstractParser<GetRegionInfoResponse>() {
805       public GetRegionInfoResponse parsePartialFrom(
806           com.google.protobuf.CodedInputStream input,
807           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
808           throws com.google.protobuf.InvalidProtocolBufferException {
809         return new GetRegionInfoResponse(input, extensionRegistry);
810       }
811     };
812 
813     @java.lang.Override
814     public com.google.protobuf.Parser<GetRegionInfoResponse> getParserForType() {
815       return PARSER;
816     }
817 
818     /**
819      * Protobuf enum {@code hbase.pb.GetRegionInfoResponse.CompactionState}
820      */
821     public enum CompactionState
822         implements com.google.protobuf.ProtocolMessageEnum {
823       /**
824        * <code>NONE = 0;</code>
825        */
826       NONE(0, 0),
827       /**
828        * <code>MINOR = 1;</code>
829        */
830       MINOR(1, 1),
831       /**
832        * <code>MAJOR = 2;</code>
833        */
834       MAJOR(2, 2),
835       /**
836        * <code>MAJOR_AND_MINOR = 3;</code>
837        */
838       MAJOR_AND_MINOR(3, 3),
839       ;
840 
841       /**
842        * <code>NONE = 0;</code>
843        */
844       public static final int NONE_VALUE = 0;
845       /**
846        * <code>MINOR = 1;</code>
847        */
848       public static final int MINOR_VALUE = 1;
849       /**
850        * <code>MAJOR = 2;</code>
851        */
852       public static final int MAJOR_VALUE = 2;
853       /**
854        * <code>MAJOR_AND_MINOR = 3;</code>
855        */
856       public static final int MAJOR_AND_MINOR_VALUE = 3;
857 
858 
859       public final int getNumber() { return value; }
860 
861       public static CompactionState valueOf(int value) {
862         switch (value) {
863           case 0: return NONE;
864           case 1: return MINOR;
865           case 2: return MAJOR;
866           case 3: return MAJOR_AND_MINOR;
867           default: return null;
868         }
869       }
870 
871       public static com.google.protobuf.Internal.EnumLiteMap<CompactionState>
872           internalGetValueMap() {
873         return internalValueMap;
874       }
875       private static com.google.protobuf.Internal.EnumLiteMap<CompactionState>
876           internalValueMap =
877             new com.google.protobuf.Internal.EnumLiteMap<CompactionState>() {
878               public CompactionState findValueByNumber(int number) {
879                 return CompactionState.valueOf(number);
880               }
881             };
882 
883       public final com.google.protobuf.Descriptors.EnumValueDescriptor
884           getValueDescriptor() {
885         return getDescriptor().getValues().get(index);
886       }
887       public final com.google.protobuf.Descriptors.EnumDescriptor
888           getDescriptorForType() {
889         return getDescriptor();
890       }
891       public static final com.google.protobuf.Descriptors.EnumDescriptor
892           getDescriptor() {
893         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0);
894       }
895 
896       private static final CompactionState[] VALUES = values();
897 
898       public static CompactionState valueOf(
899           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
900         if (desc.getType() != getDescriptor()) {
901           throw new java.lang.IllegalArgumentException(
902             "EnumValueDescriptor is not for this type.");
903         }
904         return VALUES[desc.getIndex()];
905       }
906 
907       private final int index;
908       private final int value;
909 
910       private CompactionState(int index, int value) {
911         this.index = index;
912         this.value = value;
913       }
914 
915       // @@protoc_insertion_point(enum_scope:hbase.pb.GetRegionInfoResponse.CompactionState)
916     }
917 
918     private int bitField0_;
919     // required .hbase.pb.RegionInfo region_info = 1;
920     public static final int REGION_INFO_FIELD_NUMBER = 1;
921     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
922     /**
923      * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
924      */
925     public boolean hasRegionInfo() {
926       return ((bitField0_ & 0x00000001) == 0x00000001);
927     }
928     /**
929      * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
930      */
931     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
932       return regionInfo_;
933     }
934     /**
935      * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
936      */
937     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
938       return regionInfo_;
939     }
940 
941     // optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;
942     public static final int COMPACTION_STATE_FIELD_NUMBER = 2;
943     private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_;
944     /**
945      * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
946      */
947     public boolean hasCompactionState() {
948       return ((bitField0_ & 0x00000002) == 0x00000002);
949     }
950     /**
951      * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
952      */
953     public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() {
954       return compactionState_;
955     }
956 
957     // optional bool isRecovering = 3;
958     public static final int ISRECOVERING_FIELD_NUMBER = 3;
959     private boolean isRecovering_;
960     /**
961      * <code>optional bool isRecovering = 3;</code>
962      */
963     public boolean hasIsRecovering() {
964       return ((bitField0_ & 0x00000004) == 0x00000004);
965     }
966     /**
967      * <code>optional bool isRecovering = 3;</code>
968      */
969     public boolean getIsRecovering() {
970       return isRecovering_;
971     }
972 
973     private void initFields() {
974       regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
975       compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
976       isRecovering_ = false;
977     }
978     private byte memoizedIsInitialized = -1;
979     public final boolean isInitialized() {
980       byte isInitialized = memoizedIsInitialized;
981       if (isInitialized != -1) return isInitialized == 1;
982 
983       if (!hasRegionInfo()) {
984         memoizedIsInitialized = 0;
985         return false;
986       }
987       if (!getRegionInfo().isInitialized()) {
988         memoizedIsInitialized = 0;
989         return false;
990       }
991       memoizedIsInitialized = 1;
992       return true;
993     }
994 
995     public void writeTo(com.google.protobuf.CodedOutputStream output)
996                         throws java.io.IOException {
997       getSerializedSize();
998       if (((bitField0_ & 0x00000001) == 0x00000001)) {
999         output.writeMessage(1, regionInfo_);
1000       }
1001       if (((bitField0_ & 0x00000002) == 0x00000002)) {
1002         output.writeEnum(2, compactionState_.getNumber());
1003       }
1004       if (((bitField0_ & 0x00000004) == 0x00000004)) {
1005         output.writeBool(3, isRecovering_);
1006       }
1007       getUnknownFields().writeTo(output);
1008     }
1009 
1010     private int memoizedSerializedSize = -1;
1011     public int getSerializedSize() {
1012       int size = memoizedSerializedSize;
1013       if (size != -1) return size;
1014 
1015       size = 0;
1016       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1017         size += com.google.protobuf.CodedOutputStream
1018           .computeMessageSize(1, regionInfo_);
1019       }
1020       if (((bitField0_ & 0x00000002) == 0x00000002)) {
1021         size += com.google.protobuf.CodedOutputStream
1022           .computeEnumSize(2, compactionState_.getNumber());
1023       }
1024       if (((bitField0_ & 0x00000004) == 0x00000004)) {
1025         size += com.google.protobuf.CodedOutputStream
1026           .computeBoolSize(3, isRecovering_);
1027       }
1028       size += getUnknownFields().getSerializedSize();
1029       memoizedSerializedSize = size;
1030       return size;
1031     }
1032 
1033     private static final long serialVersionUID = 0L;
1034     @java.lang.Override
1035     protected java.lang.Object writeReplace()
1036         throws java.io.ObjectStreamException {
1037       return super.writeReplace();
1038     }
1039 
1040     @java.lang.Override
1041     public boolean equals(final java.lang.Object obj) {
1042       if (obj == this) {
1043        return true;
1044       }
1045       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) {
1046         return super.equals(obj);
1047       }
1048       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj;
1049 
1050       boolean result = true;
1051       result = result && (hasRegionInfo() == other.hasRegionInfo());
1052       if (hasRegionInfo()) {
1053         result = result && getRegionInfo()
1054             .equals(other.getRegionInfo());
1055       }
1056       result = result && (hasCompactionState() == other.hasCompactionState());
1057       if (hasCompactionState()) {
1058         result = result &&
1059             (getCompactionState() == other.getCompactionState());
1060       }
1061       result = result && (hasIsRecovering() == other.hasIsRecovering());
1062       if (hasIsRecovering()) {
1063         result = result && (getIsRecovering()
1064             == other.getIsRecovering());
1065       }
1066       result = result &&
1067           getUnknownFields().equals(other.getUnknownFields());
1068       return result;
1069     }
1070 
1071     private int memoizedHashCode = 0;
1072     @java.lang.Override
1073     public int hashCode() {
1074       if (memoizedHashCode != 0) {
1075         return memoizedHashCode;
1076       }
1077       int hash = 41;
1078       hash = (19 * hash) + getDescriptorForType().hashCode();
1079       if (hasRegionInfo()) {
1080         hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
1081         hash = (53 * hash) + getRegionInfo().hashCode();
1082       }
1083       if (hasCompactionState()) {
1084         hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER;
1085         hash = (53 * hash) + hashEnum(getCompactionState());
1086       }
1087       if (hasIsRecovering()) {
1088         hash = (37 * hash) + ISRECOVERING_FIELD_NUMBER;
1089         hash = (53 * hash) + hashBoolean(getIsRecovering());
1090       }
1091       hash = (29 * hash) + getUnknownFields().hashCode();
1092       memoizedHashCode = hash;
1093       return hash;
1094     }
1095 
1096     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
1097         com.google.protobuf.ByteString data)
1098         throws com.google.protobuf.InvalidProtocolBufferException {
1099       return PARSER.parseFrom(data);
1100     }
1101     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
1102         com.google.protobuf.ByteString data,
1103         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1104         throws com.google.protobuf.InvalidProtocolBufferException {
1105       return PARSER.parseFrom(data, extensionRegistry);
1106     }
1107     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data)
1108         throws com.google.protobuf.InvalidProtocolBufferException {
1109       return PARSER.parseFrom(data);
1110     }
1111     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
1112         byte[] data,
1113         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1114         throws com.google.protobuf.InvalidProtocolBufferException {
1115       return PARSER.parseFrom(data, extensionRegistry);
1116     }
1117     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input)
1118         throws java.io.IOException {
1119       return PARSER.parseFrom(input);
1120     }
1121     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
1122         java.io.InputStream input,
1123         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1124         throws java.io.IOException {
1125       return PARSER.parseFrom(input, extensionRegistry);
1126     }
1127     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input)
1128         throws java.io.IOException {
1129       return PARSER.parseDelimitedFrom(input);
1130     }
1131     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(
1132         java.io.InputStream input,
1133         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1134         throws java.io.IOException {
1135       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1136     }
1137     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
1138         com.google.protobuf.CodedInputStream input)
1139         throws java.io.IOException {
1140       return PARSER.parseFrom(input);
1141     }
1142     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
1143         com.google.protobuf.CodedInputStream input,
1144         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1145         throws java.io.IOException {
1146       return PARSER.parseFrom(input, extensionRegistry);
1147     }
1148 
1149     public static Builder newBuilder() { return Builder.create(); }
1150     public Builder newBuilderForType() { return newBuilder(); }
1151     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) {
1152       return newBuilder().mergeFrom(prototype);
1153     }
1154     public Builder toBuilder() { return newBuilder(this); }
1155 
1156     @java.lang.Override
1157     protected Builder newBuilderForType(
1158         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1159       Builder builder = new Builder(parent);
1160       return builder;
1161     }
1162     /**
1163      * Protobuf type {@code hbase.pb.GetRegionInfoResponse}
1164      */
1165     public static final class Builder extends
1166         com.google.protobuf.GeneratedMessage.Builder<Builder>
1167        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder {
1168       public static final com.google.protobuf.Descriptors.Descriptor
1169           getDescriptor() {
1170         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_descriptor;
1171       }
1172 
1173       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1174           internalGetFieldAccessorTable() {
1175         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable
1176             .ensureFieldAccessorsInitialized(
1177                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class);
1178       }
1179 
1180       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder()
1181       private Builder() {
1182         maybeForceBuilderInitialization();
1183       }
1184 
1185       private Builder(
1186           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1187         super(parent);
1188         maybeForceBuilderInitialization();
1189       }
1190       private void maybeForceBuilderInitialization() {
1191         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1192           getRegionInfoFieldBuilder();
1193         }
1194       }
1195       private static Builder create() {
1196         return new Builder();
1197       }
1198 
1199       public Builder clear() {
1200         super.clear();
1201         if (regionInfoBuilder_ == null) {
1202           regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
1203         } else {
1204           regionInfoBuilder_.clear();
1205         }
1206         bitField0_ = (bitField0_ & ~0x00000001);
1207         compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
1208         bitField0_ = (bitField0_ & ~0x00000002);
1209         isRecovering_ = false;
1210         bitField0_ = (bitField0_ & ~0x00000004);
1211         return this;
1212       }
1213 
1214       public Builder clone() {
1215         return create().mergeFrom(buildPartial());
1216       }
1217 
1218       public com.google.protobuf.Descriptors.Descriptor
1219           getDescriptorForType() {
1220         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_descriptor;
1221       }
1222 
1223       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() {
1224         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
1225       }
1226 
1227       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() {
1228         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial();
1229         if (!result.isInitialized()) {
1230           throw newUninitializedMessageException(result);
1231         }
1232         return result;
1233       }
1234 
1235       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() {
1236         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this);
1237         int from_bitField0_ = bitField0_;
1238         int to_bitField0_ = 0;
1239         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1240           to_bitField0_ |= 0x00000001;
1241         }
1242         if (regionInfoBuilder_ == null) {
1243           result.regionInfo_ = regionInfo_;
1244         } else {
1245           result.regionInfo_ = regionInfoBuilder_.build();
1246         }
1247         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1248           to_bitField0_ |= 0x00000002;
1249         }
1250         result.compactionState_ = compactionState_;
1251         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
1252           to_bitField0_ |= 0x00000004;
1253         }
1254         result.isRecovering_ = isRecovering_;
1255         result.bitField0_ = to_bitField0_;
1256         onBuilt();
1257         return result;
1258       }
1259 
1260       public Builder mergeFrom(com.google.protobuf.Message other) {
1261         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) {
1262           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other);
1263         } else {
1264           super.mergeFrom(other);
1265           return this;
1266         }
1267       }
1268 
1269       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) {
1270         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this;
1271         if (other.hasRegionInfo()) {
1272           mergeRegionInfo(other.getRegionInfo());
1273         }
1274         if (other.hasCompactionState()) {
1275           setCompactionState(other.getCompactionState());
1276         }
1277         if (other.hasIsRecovering()) {
1278           setIsRecovering(other.getIsRecovering());
1279         }
1280         this.mergeUnknownFields(other.getUnknownFields());
1281         return this;
1282       }
1283 
1284       public final boolean isInitialized() {
1285         if (!hasRegionInfo()) {
1286           
1287           return false;
1288         }
1289         if (!getRegionInfo().isInitialized()) {
1290           
1291           return false;
1292         }
1293         return true;
1294       }
1295 
1296       public Builder mergeFrom(
1297           com.google.protobuf.CodedInputStream input,
1298           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1299           throws java.io.IOException {
1300         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parsedMessage = null;
1301         try {
1302           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1303         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1304           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) e.getUnfinishedMessage();
1305           throw e;
1306         } finally {
1307           if (parsedMessage != null) {
1308             mergeFrom(parsedMessage);
1309           }
1310         }
1311         return this;
1312       }
1313       private int bitField0_;
1314 
1315       // required .hbase.pb.RegionInfo region_info = 1;
1316       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
1317       private com.google.protobuf.SingleFieldBuilder<
1318           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
1319       /**
1320        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1321        */
1322       public boolean hasRegionInfo() {
1323         return ((bitField0_ & 0x00000001) == 0x00000001);
1324       }
1325       /**
1326        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1327        */
1328       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
1329         if (regionInfoBuilder_ == null) {
1330           return regionInfo_;
1331         } else {
1332           return regionInfoBuilder_.getMessage();
1333         }
1334       }
1335       /**
1336        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1337        */
1338       public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
1339         if (regionInfoBuilder_ == null) {
1340           if (value == null) {
1341             throw new NullPointerException();
1342           }
1343           regionInfo_ = value;
1344           onChanged();
1345         } else {
1346           regionInfoBuilder_.setMessage(value);
1347         }
1348         bitField0_ |= 0x00000001;
1349         return this;
1350       }
1351       /**
1352        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1353        */
1354       public Builder setRegionInfo(
1355           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
1356         if (regionInfoBuilder_ == null) {
1357           regionInfo_ = builderForValue.build();
1358           onChanged();
1359         } else {
1360           regionInfoBuilder_.setMessage(builderForValue.build());
1361         }
1362         bitField0_ |= 0x00000001;
1363         return this;
1364       }
1365       /**
1366        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1367        */
1368       public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
1369         if (regionInfoBuilder_ == null) {
1370           if (((bitField0_ & 0x00000001) == 0x00000001) &&
1371               regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
1372             regionInfo_ =
1373               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial();
1374           } else {
1375             regionInfo_ = value;
1376           }
1377           onChanged();
1378         } else {
1379           regionInfoBuilder_.mergeFrom(value);
1380         }
1381         bitField0_ |= 0x00000001;
1382         return this;
1383       }
1384       /**
1385        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1386        */
1387       public Builder clearRegionInfo() {
1388         if (regionInfoBuilder_ == null) {
1389           regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
1390           onChanged();
1391         } else {
1392           regionInfoBuilder_.clear();
1393         }
1394         bitField0_ = (bitField0_ & ~0x00000001);
1395         return this;
1396       }
1397       /**
1398        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1399        */
1400       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
1401         bitField0_ |= 0x00000001;
1402         onChanged();
1403         return getRegionInfoFieldBuilder().getBuilder();
1404       }
1405       /**
1406        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1407        */
1408       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
1409         if (regionInfoBuilder_ != null) {
1410           return regionInfoBuilder_.getMessageOrBuilder();
1411         } else {
1412           return regionInfo_;
1413         }
1414       }
1415       /**
1416        * <code>required .hbase.pb.RegionInfo region_info = 1;</code>
1417        */
1418       private com.google.protobuf.SingleFieldBuilder<
1419           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
1420           getRegionInfoFieldBuilder() {
1421         if (regionInfoBuilder_ == null) {
1422           regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
1423               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
1424                   regionInfo_,
1425                   getParentForChildren(),
1426                   isClean());
1427           regionInfo_ = null;
1428         }
1429         return regionInfoBuilder_;
1430       }
1431 
      // optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;
      // Builder-local value; bit 0x00000002 of bitField0_ records explicit presence.
      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
      /**
       * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
       * True only if the field was explicitly set on this builder.
       */
      public boolean hasCompactionState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
       * Returns NONE (the declared default) when the field is unset.
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() {
        return compactionState_;
      }
      /**
       * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
       * @throws NullPointerException if {@code value} is null; proto2 enum fields
       *         may never hold null.
       */
      public Builder setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        compactionState_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2;</code>
       * Clears the presence bit and restores the declared default (NONE).
       */
      public Builder clearCompactionState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
        onChanged();
        return this;
      }
1467 
      // optional bool isRecovering = 3;
      // Builder-local value; bit 0x00000004 of bitField0_ records explicit presence.
      private boolean isRecovering_ ;
      /**
       * <code>optional bool isRecovering = 3;</code>
       * True only if the field was explicitly set on this builder.
       */
      public boolean hasIsRecovering() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool isRecovering = 3;</code>
       * Returns false (the Java default) when the field is unset.
       */
      public boolean getIsRecovering() {
        return isRecovering_;
      }
      /**
       * <code>optional bool isRecovering = 3;</code>
       * Sets the value and marks the field present.
       */
      public Builder setIsRecovering(boolean value) {
        bitField0_ |= 0x00000004;
        isRecovering_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool isRecovering = 3;</code>
       * Clears the presence bit and resets the value to false.
       */
      public Builder clearIsRecovering() {
        bitField0_ = (bitField0_ & ~0x00000004);
        isRecovering_ = false;
        onChanged();
        return this;
      }
1500 
1501       // @@protoc_insertion_point(builder_scope:hbase.pb.GetRegionInfoResponse)
1502     }
1503 
1504     static {
1505       defaultInstance = new GetRegionInfoResponse(true);
1506       defaultInstance.initFields();
1507     }
1508 
1509     // @@protoc_insertion_point(class_scope:hbase.pb.GetRegionInfoResponse)
1510   }
1511 
  /**
   * Read-only view of a {@code hbase.pb.GetStoreFileRequest}, implemented by
   * both the immutable message and its builder so callers can accept either.
   */
  public interface GetStoreFileRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // repeated bytes family = 2;
    /**
     * <code>repeated bytes family = 2;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getFamilyList();
    /**
     * <code>repeated bytes family = 2;</code>
     */
    int getFamilyCount();
    /**
     * <code>repeated bytes family = 2;</code>
     */
    com.google.protobuf.ByteString getFamily(int index);
  }
1543   /**
1544    * Protobuf type {@code hbase.pb.GetStoreFileRequest}
1545    *
1546    * <pre>
1547    **
1548    * Get a list of store files for a set of column families in a particular region.
1549    * If no column family is specified, get the store files for all column families.
1550    * </pre>
1551    */
1552   public static final class GetStoreFileRequest extends
1553       com.google.protobuf.GeneratedMessage
1554       implements GetStoreFileRequestOrBuilder {
1555     // Use GetStoreFileRequest.newBuilder() to construct.
1556     private GetStoreFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1557       super(builder);
1558       this.unknownFields = builder.getUnknownFields();
1559     }
1560     private GetStoreFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1561 
1562     private static final GetStoreFileRequest defaultInstance;
1563     public static GetStoreFileRequest getDefaultInstance() {
1564       return defaultInstance;
1565     }
1566 
1567     public GetStoreFileRequest getDefaultInstanceForType() {
1568       return defaultInstance;
1569     }
1570 
1571     private final com.google.protobuf.UnknownFieldSet unknownFields;
1572     @java.lang.Override
1573     public final com.google.protobuf.UnknownFieldSet
1574         getUnknownFields() {
1575       return this.unknownFields;
1576     }
1577     private GetStoreFileRequest(
1578         com.google.protobuf.CodedInputStream input,
1579         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1580         throws com.google.protobuf.InvalidProtocolBufferException {
1581       initFields();
1582       int mutable_bitField0_ = 0;
1583       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1584           com.google.protobuf.UnknownFieldSet.newBuilder();
1585       try {
1586         boolean done = false;
1587         while (!done) {
1588           int tag = input.readTag();
1589           switch (tag) {
1590             case 0:
1591               done = true;
1592               break;
1593             default: {
1594               if (!parseUnknownField(input, unknownFields,
1595                                      extensionRegistry, tag)) {
1596                 done = true;
1597               }
1598               break;
1599             }
1600             case 10: {
1601               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
1602               if (((bitField0_ & 0x00000001) == 0x00000001)) {
1603                 subBuilder = region_.toBuilder();
1604               }
1605               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
1606               if (subBuilder != null) {
1607                 subBuilder.mergeFrom(region_);
1608                 region_ = subBuilder.buildPartial();
1609               }
1610               bitField0_ |= 0x00000001;
1611               break;
1612             }
1613             case 18: {
1614               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
1615                 family_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
1616                 mutable_bitField0_ |= 0x00000002;
1617               }
1618               family_.add(input.readBytes());
1619               break;
1620             }
1621           }
1622         }
1623       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1624         throw e.setUnfinishedMessage(this);
1625       } catch (java.io.IOException e) {
1626         throw new com.google.protobuf.InvalidProtocolBufferException(
1627             e.getMessage()).setUnfinishedMessage(this);
1628       } finally {
1629         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
1630           family_ = java.util.Collections.unmodifiableList(family_);
1631         }
1632         this.unknownFields = unknownFields.build();
1633         makeExtensionsImmutable();
1634       }
1635     }
1636     public static final com.google.protobuf.Descriptors.Descriptor
1637         getDescriptor() {
1638       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_descriptor;
1639     }
1640 
1641     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1642         internalGetFieldAccessorTable() {
1643       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable
1644           .ensureFieldAccessorsInitialized(
1645               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
1646     }
1647 
1648     public static com.google.protobuf.Parser<GetStoreFileRequest> PARSER =
1649         new com.google.protobuf.AbstractParser<GetStoreFileRequest>() {
1650       public GetStoreFileRequest parsePartialFrom(
1651           com.google.protobuf.CodedInputStream input,
1652           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1653           throws com.google.protobuf.InvalidProtocolBufferException {
1654         return new GetStoreFileRequest(input, extensionRegistry);
1655       }
1656     };
1657 
1658     @java.lang.Override
1659     public com.google.protobuf.Parser<GetStoreFileRequest> getParserForType() {
1660       return PARSER;
1661     }
1662 
1663     private int bitField0_;
1664     // required .hbase.pb.RegionSpecifier region = 1;
1665     public static final int REGION_FIELD_NUMBER = 1;
1666     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
1667     /**
1668      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
1669      */
1670     public boolean hasRegion() {
1671       return ((bitField0_ & 0x00000001) == 0x00000001);
1672     }
1673     /**
1674      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
1675      */
1676     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
1677       return region_;
1678     }
1679     /**
1680      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
1681      */
1682     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
1683       return region_;
1684     }
1685 
1686     // repeated bytes family = 2;
1687     public static final int FAMILY_FIELD_NUMBER = 2;
1688     private java.util.List<com.google.protobuf.ByteString> family_;
1689     /**
1690      * <code>repeated bytes family = 2;</code>
1691      */
1692     public java.util.List<com.google.protobuf.ByteString>
1693         getFamilyList() {
1694       return family_;
1695     }
1696     /**
1697      * <code>repeated bytes family = 2;</code>
1698      */
1699     public int getFamilyCount() {
1700       return family_.size();
1701     }
1702     /**
1703      * <code>repeated bytes family = 2;</code>
1704      */
1705     public com.google.protobuf.ByteString getFamily(int index) {
1706       return family_.get(index);
1707     }
1708 
1709     private void initFields() {
1710       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
1711       family_ = java.util.Collections.emptyList();
1712     }
1713     private byte memoizedIsInitialized = -1;
1714     public final boolean isInitialized() {
1715       byte isInitialized = memoizedIsInitialized;
1716       if (isInitialized != -1) return isInitialized == 1;
1717 
1718       if (!hasRegion()) {
1719         memoizedIsInitialized = 0;
1720         return false;
1721       }
1722       if (!getRegion().isInitialized()) {
1723         memoizedIsInitialized = 0;
1724         return false;
1725       }
1726       memoizedIsInitialized = 1;
1727       return true;
1728     }
1729 
1730     public void writeTo(com.google.protobuf.CodedOutputStream output)
1731                         throws java.io.IOException {
1732       getSerializedSize();
1733       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1734         output.writeMessage(1, region_);
1735       }
1736       for (int i = 0; i < family_.size(); i++) {
1737         output.writeBytes(2, family_.get(i));
1738       }
1739       getUnknownFields().writeTo(output);
1740     }
1741 
1742     private int memoizedSerializedSize = -1;
1743     public int getSerializedSize() {
1744       int size = memoizedSerializedSize;
1745       if (size != -1) return size;
1746 
1747       size = 0;
1748       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1749         size += com.google.protobuf.CodedOutputStream
1750           .computeMessageSize(1, region_);
1751       }
1752       {
1753         int dataSize = 0;
1754         for (int i = 0; i < family_.size(); i++) {
1755           dataSize += com.google.protobuf.CodedOutputStream
1756             .computeBytesSizeNoTag(family_.get(i));
1757         }
1758         size += dataSize;
1759         size += 1 * getFamilyList().size();
1760       }
1761       size += getUnknownFields().getSerializedSize();
1762       memoizedSerializedSize = size;
1763       return size;
1764     }
1765 
1766     private static final long serialVersionUID = 0L;
1767     @java.lang.Override
1768     protected java.lang.Object writeReplace()
1769         throws java.io.ObjectStreamException {
1770       return super.writeReplace();
1771     }
1772 
1773     @java.lang.Override
1774     public boolean equals(final java.lang.Object obj) {
1775       if (obj == this) {
1776        return true;
1777       }
1778       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)) {
1779         return super.equals(obj);
1780       }
1781       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj;
1782 
1783       boolean result = true;
1784       result = result && (hasRegion() == other.hasRegion());
1785       if (hasRegion()) {
1786         result = result && getRegion()
1787             .equals(other.getRegion());
1788       }
1789       result = result && getFamilyList()
1790           .equals(other.getFamilyList());
1791       result = result &&
1792           getUnknownFields().equals(other.getUnknownFields());
1793       return result;
1794     }
1795 
1796     private int memoizedHashCode = 0;
1797     @java.lang.Override
1798     public int hashCode() {
1799       if (memoizedHashCode != 0) {
1800         return memoizedHashCode;
1801       }
1802       int hash = 41;
1803       hash = (19 * hash) + getDescriptorForType().hashCode();
1804       if (hasRegion()) {
1805         hash = (37 * hash) + REGION_FIELD_NUMBER;
1806         hash = (53 * hash) + getRegion().hashCode();
1807       }
1808       if (getFamilyCount() > 0) {
1809         hash = (37 * hash) + FAMILY_FIELD_NUMBER;
1810         hash = (53 * hash) + getFamilyList().hashCode();
1811       }
1812       hash = (29 * hash) + getUnknownFields().hashCode();
1813       memoizedHashCode = hash;
1814       return hash;
1815     }
1816 
1817     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
1818         com.google.protobuf.ByteString data)
1819         throws com.google.protobuf.InvalidProtocolBufferException {
1820       return PARSER.parseFrom(data);
1821     }
1822     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
1823         com.google.protobuf.ByteString data,
1824         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1825         throws com.google.protobuf.InvalidProtocolBufferException {
1826       return PARSER.parseFrom(data, extensionRegistry);
1827     }
1828     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data)
1829         throws com.google.protobuf.InvalidProtocolBufferException {
1830       return PARSER.parseFrom(data);
1831     }
1832     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
1833         byte[] data,
1834         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1835         throws com.google.protobuf.InvalidProtocolBufferException {
1836       return PARSER.parseFrom(data, extensionRegistry);
1837     }
1838     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input)
1839         throws java.io.IOException {
1840       return PARSER.parseFrom(input);
1841     }
1842     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
1843         java.io.InputStream input,
1844         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1845         throws java.io.IOException {
1846       return PARSER.parseFrom(input, extensionRegistry);
1847     }
1848     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input)
1849         throws java.io.IOException {
1850       return PARSER.parseDelimitedFrom(input);
1851     }
1852     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(
1853         java.io.InputStream input,
1854         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1855         throws java.io.IOException {
1856       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1857     }
1858     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
1859         com.google.protobuf.CodedInputStream input)
1860         throws java.io.IOException {
1861       return PARSER.parseFrom(input);
1862     }
1863     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
1864         com.google.protobuf.CodedInputStream input,
1865         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1866         throws java.io.IOException {
1867       return PARSER.parseFrom(input, extensionRegistry);
1868     }
1869 
1870     public static Builder newBuilder() { return Builder.create(); }
1871     public Builder newBuilderForType() { return newBuilder(); }
1872     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) {
1873       return newBuilder().mergeFrom(prototype);
1874     }
1875     public Builder toBuilder() { return newBuilder(this); }
1876 
1877     @java.lang.Override
1878     protected Builder newBuilderForType(
1879         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1880       Builder builder = new Builder(parent);
1881       return builder;
1882     }
1883     /**
1884      * Protobuf type {@code hbase.pb.GetStoreFileRequest}
1885      *
1886      * <pre>
1887      **
1888      * Get a list of store files for a set of column families in a particular region.
1889      * If no column family is specified, get the store files for all column families.
1890      * </pre>
1891      */
1892     public static final class Builder extends
1893         com.google.protobuf.GeneratedMessage.Builder<Builder>
1894        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder {
1895       public static final com.google.protobuf.Descriptors.Descriptor
1896           getDescriptor() {
1897         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_descriptor;
1898       }
1899 
1900       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1901           internalGetFieldAccessorTable() {
1902         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable
1903             .ensureFieldAccessorsInitialized(
1904                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
1905       }
1906 
1907       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder()
1908       private Builder() {
1909         maybeForceBuilderInitialization();
1910       }
1911 
1912       private Builder(
1913           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1914         super(parent);
1915         maybeForceBuilderInitialization();
1916       }
1917       private void maybeForceBuilderInitialization() {
1918         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1919           getRegionFieldBuilder();
1920         }
1921       }
1922       private static Builder create() {
1923         return new Builder();
1924       }
1925 
1926       public Builder clear() {
1927         super.clear();
1928         if (regionBuilder_ == null) {
1929           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
1930         } else {
1931           regionBuilder_.clear();
1932         }
1933         bitField0_ = (bitField0_ & ~0x00000001);
1934         family_ = java.util.Collections.emptyList();
1935         bitField0_ = (bitField0_ & ~0x00000002);
1936         return this;
1937       }
1938 
1939       public Builder clone() {
1940         return create().mergeFrom(buildPartial());
1941       }
1942 
1943       public com.google.protobuf.Descriptors.Descriptor
1944           getDescriptorForType() {
1945         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_descriptor;
1946       }
1947 
1948       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() {
1949         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
1950       }
1951 
1952       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() {
1953         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial();
1954         if (!result.isInitialized()) {
1955           throw newUninitializedMessageException(result);
1956         }
1957         return result;
1958       }
1959 
1960       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() {
1961         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this);
1962         int from_bitField0_ = bitField0_;
1963         int to_bitField0_ = 0;
1964         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1965           to_bitField0_ |= 0x00000001;
1966         }
1967         if (regionBuilder_ == null) {
1968           result.region_ = region_;
1969         } else {
1970           result.region_ = regionBuilder_.build();
1971         }
1972         if (((bitField0_ & 0x00000002) == 0x00000002)) {
1973           family_ = java.util.Collections.unmodifiableList(family_);
1974           bitField0_ = (bitField0_ & ~0x00000002);
1975         }
1976         result.family_ = family_;
1977         result.bitField0_ = to_bitField0_;
1978         onBuilt();
1979         return result;
1980       }
1981 
1982       public Builder mergeFrom(com.google.protobuf.Message other) {
1983         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) {
1984           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other);
1985         } else {
1986           super.mergeFrom(other);
1987           return this;
1988         }
1989       }
1990 
1991       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) {
1992         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this;
1993         if (other.hasRegion()) {
1994           mergeRegion(other.getRegion());
1995         }
1996         if (!other.family_.isEmpty()) {
1997           if (family_.isEmpty()) {
1998             family_ = other.family_;
1999             bitField0_ = (bitField0_ & ~0x00000002);
2000           } else {
2001             ensureFamilyIsMutable();
2002             family_.addAll(other.family_);
2003           }
2004           onChanged();
2005         }
2006         this.mergeUnknownFields(other.getUnknownFields());
2007         return this;
2008       }
2009 
2010       public final boolean isInitialized() {
2011         if (!hasRegion()) {
2012           
2013           return false;
2014         }
2015         if (!getRegion().isInitialized()) {
2016           
2017           return false;
2018         }
2019         return true;
2020       }
2021 
2022       public Builder mergeFrom(
2023           com.google.protobuf.CodedInputStream input,
2024           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2025           throws java.io.IOException {
2026         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parsedMessage = null;
2027         try {
2028           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2029         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2030           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) e.getUnfinishedMessage();
2031           throw e;
2032         } finally {
2033           if (parsedMessage != null) {
2034             mergeFrom(parsedMessage);
2035           }
2036         }
2037         return this;
2038       }
2039       private int bitField0_;
2040 
2041       // required .hbase.pb.RegionSpecifier region = 1;
2042       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
2043       private com.google.protobuf.SingleFieldBuilder<
2044           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
2045       /**
2046        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2047        */
2048       public boolean hasRegion() {
2049         return ((bitField0_ & 0x00000001) == 0x00000001);
2050       }
2051       /**
2052        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2053        */
2054       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
2055         if (regionBuilder_ == null) {
2056           return region_;
2057         } else {
2058           return regionBuilder_.getMessage();
2059         }
2060       }
2061       /**
2062        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2063        */
2064       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
2065         if (regionBuilder_ == null) {
2066           if (value == null) {
2067             throw new NullPointerException();
2068           }
2069           region_ = value;
2070           onChanged();
2071         } else {
2072           regionBuilder_.setMessage(value);
2073         }
2074         bitField0_ |= 0x00000001;
2075         return this;
2076       }
2077       /**
2078        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2079        */
2080       public Builder setRegion(
2081           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
2082         if (regionBuilder_ == null) {
2083           region_ = builderForValue.build();
2084           onChanged();
2085         } else {
2086           regionBuilder_.setMessage(builderForValue.build());
2087         }
2088         bitField0_ |= 0x00000001;
2089         return this;
2090       }
2091       /**
2092        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2093        */
2094       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
2095         if (regionBuilder_ == null) {
2096           if (((bitField0_ & 0x00000001) == 0x00000001) &&
2097               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
2098             region_ =
2099               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
2100           } else {
2101             region_ = value;
2102           }
2103           onChanged();
2104         } else {
2105           regionBuilder_.mergeFrom(value);
2106         }
2107         bitField0_ |= 0x00000001;
2108         return this;
2109       }
2110       /**
2111        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2112        */
2113       public Builder clearRegion() {
2114         if (regionBuilder_ == null) {
2115           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
2116           onChanged();
2117         } else {
2118           regionBuilder_.clear();
2119         }
2120         bitField0_ = (bitField0_ & ~0x00000001);
2121         return this;
2122       }
2123       /**
2124        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2125        */
2126       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
2127         bitField0_ |= 0x00000001;
2128         onChanged();
2129         return getRegionFieldBuilder().getBuilder();
2130       }
2131       /**
2132        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2133        */
2134       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
2135         if (regionBuilder_ != null) {
2136           return regionBuilder_.getMessageOrBuilder();
2137         } else {
2138           return region_;
2139         }
2140       }
2141       /**
2142        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
2143        */
2144       private com.google.protobuf.SingleFieldBuilder<
2145           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
2146           getRegionFieldBuilder() {
2147         if (regionBuilder_ == null) {
2148           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
2149               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
2150                   region_,
2151                   getParentForChildren(),
2152                   isClean());
2153           region_ = null;
2154         }
2155         return regionBuilder_;
2156       }
2157 
      // repeated bytes family = 2;
      // Builder-side storage for the repeated "family" field (column family
      // names as raw bytes). Starts as the shared immutable empty list and is
      // converted copy-on-write to a private ArrayList on first mutation.
      private java.util.List<com.google.protobuf.ByteString> family_ = java.util.Collections.emptyList();
      // Lazily replaces family_ with a mutable copy owned by this builder;
      // bit 0x00000002 of bitField0_ records that the copy has been made.
      private void ensureFamilyIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          family_ = new java.util.ArrayList<com.google.protobuf.ByteString>(family_);
          bitField0_ |= 0x00000002;
         }
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public java.util.List<com.google.protobuf.ByteString>
          getFamilyList() {
        // Unmodifiable view: callers must use the mutator methods below.
        return java.util.Collections.unmodifiableList(family_);
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public int getFamilyCount() {
        return family_.size();
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public com.google.protobuf.ByteString getFamily(int index) {
        return family_.get(index);
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder setFamily(
          int index, com.google.protobuf.ByteString value) {
        // Protobuf fields are null-hostile: reject null before mutating.
        if (value == null) {
    throw new NullPointerException();
  }
  ensureFamilyIsMutable();
        family_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder addFamily(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureFamilyIsMutable();
        family_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder addAllFamily(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureFamilyIsMutable();
        // GeneratedMessage.Builder.addAll null-checks each element.
        super.addAll(values, family_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder clearFamily() {
        // Revert to the shared empty list and clear the "mutable copy" bit.
        family_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
2229 
2230       // @@protoc_insertion_point(builder_scope:hbase.pb.GetStoreFileRequest)
2231     }
2232 
    // Eagerly build the singleton returned by getDefaultInstance(); the
    // noInit constructor skips parsing, so initFields() sets field defaults.
    static {
      defaultInstance = new GetStoreFileRequest(true);
      defaultInstance.initFields();
    }
2237 
2238     // @@protoc_insertion_point(class_scope:hbase.pb.GetStoreFileRequest)
2239   }
2240 
  /**
   * Read-only accessor contract shared by {@code GetStoreFileResponse} and
   * its Builder: views of the repeated string field {@code store_file}.
   */
  public interface GetStoreFileResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated string store_file = 1;
    /**
     * <code>repeated string store_file = 1;</code>
     */
    java.util.List<java.lang.String>
    getStoreFileList();
    /**
     * <code>repeated string store_file = 1;</code>
     */
    int getStoreFileCount();
    /**
     * <code>repeated string store_file = 1;</code>
     */
    java.lang.String getStoreFile(int index);
    /**
     * <code>repeated string store_file = 1;</code>
     *
     * <p>Raw, unvalidated UTF-8 bytes of the element at {@code index}.
     */
    com.google.protobuf.ByteString
        getStoreFileBytes(int index);
  }
  /**
   * Protobuf type {@code hbase.pb.GetStoreFileResponse}
   *
   * <p>Immutable message holding the result of a GetStoreFile RPC: a single
   * repeated string field {@code store_file}. Construct via
   * {@link #newBuilder()}; instances are safe to share across threads.
   */
  public static final class GetStoreFileResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetStoreFileResponseOrBuilder {
    // Use GetStoreFileResponse.newBuilder() to construct.
    private GetStoreFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton default instance.
    private GetStoreFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetStoreFileResponse defaultInstance;
    public static GetStoreFileResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetStoreFileResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked via PARSER.parsePartialFrom.
    // Note the generator emits "default" before "case 10"; switch labels
    // match by value, so the textual order has no effect on behavior.
    private GetStoreFileResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / end of message.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (store_file), wire type 2: allocate the repeated
              // list lazily on first occurrence.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                storeFile_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              storeFile_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field even if parsing aborted mid-message, so
        // the partially built message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class);
    }

    // Generator-emitted mutable public static; treat as read-only by convention.
    public static com.google.protobuf.Parser<GetStoreFileResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetStoreFileResponse>() {
      public GetStoreFileResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetStoreFileResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetStoreFileResponse> getParserForType() {
      return PARSER;
    }

    // repeated string store_file = 1;
    public static final int STORE_FILE_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList storeFile_;
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public java.util.List<java.lang.String>
        getStoreFileList() {
      return storeFile_;
    }
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public int getStoreFileCount() {
      return storeFile_.size();
    }
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public java.lang.String getStoreFile(int index) {
      return storeFile_.get(index);
    }
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public com.google.protobuf.ByteString
        getStoreFileBytes(int index) {
      return storeFile_.getByteString(index);
    }

    private void initFields() {
      storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    // -1 = not computed yet; 0/1 memoize the isInitialized() answer.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields in this message, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < storeFile_.size(); i++) {
        output.writeBytes(1, storeFile_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < storeFile_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(storeFile_.getByteString(i));
        }
        size += dataSize;
        // One byte of tag overhead per element (field 1, wire type 2).
        size += 1 * getStoreFileList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) obj;

      boolean result = true;
      result = result && getStoreFileList()
          .equals(other.getStoreFileList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Benign race: recomputation yields the same value, so unsynchronized
    // memoization is safe.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getStoreFileCount() > 0) {
        hash = (37 * hash) + STORE_FILE_FIELD_NUMBER;
        hash = (53 * hash) + getStoreFileList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.GetStoreFileResponse}
     *
     * <p>Mutable builder; not thread-safe. Build via {@link #build()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Transfers ownership of storeFile_ to the new message: the list is
      // wrapped unmodifiable and the builder's "mutable" bit cleared, so a
      // later builder mutation makes a fresh copy instead of aliasing.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(
              storeFile_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.storeFile_ = storeFile_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()) return this;
        if (!other.storeFile_.isEmpty()) {
          if (storeFile_.isEmpty()) {
            // Share other's (immutable) list; clearing the bit forces a copy
            // before any subsequent local mutation.
            storeFile_ = other.storeFile_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureStoreFileIsMutable();
            storeFile_.addAll(other.storeFile_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge whatever was parsed before a failure, preserving partial data.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated string store_file = 1;
      // Copy-on-write list; bit 0x00000001 of bitField0_ tracks ownership.
      private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureStoreFileIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public java.util.List<java.lang.String>
          getStoreFileList() {
        return java.util.Collections.unmodifiableList(storeFile_);
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public int getStoreFileCount() {
        return storeFile_.size();
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public java.lang.String getStoreFile(int index) {
        return storeFile_.get(index);
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public com.google.protobuf.ByteString
          getStoreFileBytes(int index) {
        return storeFile_.getByteString(index);
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder setStoreFile(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder addStoreFile(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder addAllStoreFile(
          java.lang.Iterable<java.lang.String> values) {
        ensureStoreFileIsMutable();
        super.addAll(values, storeFile_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder clearStoreFile() {
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       *
       * <p>Adds raw bytes without UTF-8 validation.
       */
      public Builder addStoreFileBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetStoreFileResponse)
    }

    // Eagerly build the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new GetStoreFileResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.GetStoreFileResponse)
  }
2772 
  /**
   * Accessor contract for {@code hbase.pb.GetOnlineRegionRequest}; the
   * message declares no fields, so only the base Message accessors apply.
   */
  public interface GetOnlineRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
2776   /**
2777    * Protobuf type {@code hbase.pb.GetOnlineRegionRequest}
2778    */
2779   public static final class GetOnlineRegionRequest extends
2780       com.google.protobuf.GeneratedMessage
2781       implements GetOnlineRegionRequestOrBuilder {
    // Use GetOnlineRegionRequest.newBuilder() to construct.
    private GetOnlineRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton default instance.
    private GetOnlineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the class static initializer.
    private static final GetOnlineRegionRequest defaultInstance;
    public static GetOnlineRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetOnlineRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. The message has no declared fields,
    // so every non-zero tag is routed to parseUnknownField and preserved in
    // unknownFields for round-tripping.
    private GetOnlineRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / end of message.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
    }

    // Generator-emitted mutable public static; treat as read-only by convention.
    public static com.google.protobuf.Parser<GetOnlineRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetOnlineRegionRequest>() {
      public GetOnlineRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetOnlineRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetOnlineRegionRequest> getParserForType() {
      return PARSER;
    }
2864 
    // No fields to default-initialize for this empty message.
    private void initFields() {
    }
    // -1 = not computed yet; 0/1 memoize the isInitialized() answer.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      // Only unknown fields can carry data for this empty message.
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2892 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj;

      // Empty message: equality depends only on unknown fields.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Benign race: recomputation yields the same value, so unsynchronized
    // memoization is safe.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2928 
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2995     /**
2996      * Protobuf type {@code hbase.pb.GetOnlineRegionRequest}
2997      */
2998     public static final class Builder extends
2999         com.google.protobuf.GeneratedMessage.Builder<Builder>
3000        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder {
3001       public static final com.google.protobuf.Descriptors.Descriptor
3002           getDescriptor() {
3003         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_descriptor;
3004       }
3005 
3006       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3007           internalGetFieldAccessorTable() {
3008         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable
3009             .ensureFieldAccessorsInitialized(
3010                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
3011       }
3012 
3013       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder()
3014       private Builder() {
3015         maybeForceBuilderInitialization();
3016       }
3017 
3018       private Builder(
3019           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3020         super(parent);
3021         maybeForceBuilderInitialization();
3022       }
3023       private void maybeForceBuilderInitialization() {
3024         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3025         }
3026       }
3027       private static Builder create() {
3028         return new Builder();
3029       }
3030 
3031       public Builder clear() {
3032         super.clear();
3033         return this;
3034       }
3035 
3036       public Builder clone() {
3037         return create().mergeFrom(buildPartial());
3038       }
3039 
3040       public com.google.protobuf.Descriptors.Descriptor
3041           getDescriptorForType() {
3042         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_descriptor;
3043       }
3044 
3045       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() {
3046         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
3047       }
3048 
3049       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() {
3050         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial();
3051         if (!result.isInitialized()) {
3052           throw newUninitializedMessageException(result);
3053         }
3054         return result;
3055       }
3056 
3057       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() {
3058         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this);
3059         onBuilt();
3060         return result;
3061       }
3062 
3063       public Builder mergeFrom(com.google.protobuf.Message other) {
3064         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) {
3065           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other);
3066         } else {
3067           super.mergeFrom(other);
3068           return this;
3069         }
3070       }
3071 
3072       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) {
3073         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this;
3074         this.mergeUnknownFields(other.getUnknownFields());
3075         return this;
3076       }
3077 
3078       public final boolean isInitialized() {
3079         return true;
3080       }
3081 
3082       public Builder mergeFrom(
3083           com.google.protobuf.CodedInputStream input,
3084           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3085           throws java.io.IOException {
3086         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parsedMessage = null;
3087         try {
3088           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3089         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3090           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) e.getUnfinishedMessage();
3091           throw e;
3092         } finally {
3093           if (parsedMessage != null) {
3094             mergeFrom(parsedMessage);
3095           }
3096         }
3097         return this;
3098       }
3099 
3100       // @@protoc_insertion_point(builder_scope:hbase.pb.GetOnlineRegionRequest)
3101     }
3102 
3103     static {
3104       defaultInstance = new GetOnlineRegionRequest(true);
3105       defaultInstance.initFields();
3106     }
3107 
3108     // @@protoc_insertion_point(class_scope:hbase.pb.GetOnlineRegionRequest)
3109   }
3110 
3111   public interface GetOnlineRegionResponseOrBuilder
3112       extends com.google.protobuf.MessageOrBuilder {
3113 
3114     // repeated .hbase.pb.RegionInfo region_info = 1;
3115     /**
3116      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3117      */
3118     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> 
3119         getRegionInfoList();
3120     /**
3121      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3122      */
3123     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
3124     /**
3125      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3126      */
3127     int getRegionInfoCount();
3128     /**
3129      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3130      */
3131     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
3132         getRegionInfoOrBuilderList();
3133     /**
3134      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3135      */
3136     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
3137         int index);
3138   }
3139   /**
3140    * Protobuf type {@code hbase.pb.GetOnlineRegionResponse}
3141    */
3142   public static final class GetOnlineRegionResponse extends
3143       com.google.protobuf.GeneratedMessage
3144       implements GetOnlineRegionResponseOrBuilder {
3145     // Use GetOnlineRegionResponse.newBuilder() to construct.
3146     private GetOnlineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3147       super(builder);
3148       this.unknownFields = builder.getUnknownFields();
3149     }
3150     private GetOnlineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3151 
3152     private static final GetOnlineRegionResponse defaultInstance;
3153     public static GetOnlineRegionResponse getDefaultInstance() {
3154       return defaultInstance;
3155     }
3156 
3157     public GetOnlineRegionResponse getDefaultInstanceForType() {
3158       return defaultInstance;
3159     }
3160 
3161     private final com.google.protobuf.UnknownFieldSet unknownFields;
3162     @java.lang.Override
3163     public final com.google.protobuf.UnknownFieldSet
3164         getUnknownFields() {
3165       return this.unknownFields;
3166     }
3167     private GetOnlineRegionResponse(
3168         com.google.protobuf.CodedInputStream input,
3169         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3170         throws com.google.protobuf.InvalidProtocolBufferException {
3171       initFields();
3172       int mutable_bitField0_ = 0;
3173       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3174           com.google.protobuf.UnknownFieldSet.newBuilder();
3175       try {
3176         boolean done = false;
3177         while (!done) {
3178           int tag = input.readTag();
3179           switch (tag) {
3180             case 0:
3181               done = true;
3182               break;
3183             default: {
3184               if (!parseUnknownField(input, unknownFields,
3185                                      extensionRegistry, tag)) {
3186                 done = true;
3187               }
3188               break;
3189             }
3190             case 10: {
3191               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
3192                 regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>();
3193                 mutable_bitField0_ |= 0x00000001;
3194               }
3195               regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
3196               break;
3197             }
3198           }
3199         }
3200       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3201         throw e.setUnfinishedMessage(this);
3202       } catch (java.io.IOException e) {
3203         throw new com.google.protobuf.InvalidProtocolBufferException(
3204             e.getMessage()).setUnfinishedMessage(this);
3205       } finally {
3206         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
3207           regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
3208         }
3209         this.unknownFields = unknownFields.build();
3210         makeExtensionsImmutable();
3211       }
3212     }
3213     public static final com.google.protobuf.Descriptors.Descriptor
3214         getDescriptor() {
3215       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_descriptor;
3216     }
3217 
3218     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3219         internalGetFieldAccessorTable() {
3220       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable
3221           .ensureFieldAccessorsInitialized(
3222               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class);
3223     }
3224 
3225     public static com.google.protobuf.Parser<GetOnlineRegionResponse> PARSER =
3226         new com.google.protobuf.AbstractParser<GetOnlineRegionResponse>() {
3227       public GetOnlineRegionResponse parsePartialFrom(
3228           com.google.protobuf.CodedInputStream input,
3229           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3230           throws com.google.protobuf.InvalidProtocolBufferException {
3231         return new GetOnlineRegionResponse(input, extensionRegistry);
3232       }
3233     };
3234 
3235     @java.lang.Override
3236     public com.google.protobuf.Parser<GetOnlineRegionResponse> getParserForType() {
3237       return PARSER;
3238     }
3239 
3240     // repeated .hbase.pb.RegionInfo region_info = 1;
3241     public static final int REGION_INFO_FIELD_NUMBER = 1;
3242     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
3243     /**
3244      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3245      */
3246     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
3247       return regionInfo_;
3248     }
3249     /**
3250      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3251      */
3252     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
3253         getRegionInfoOrBuilderList() {
3254       return regionInfo_;
3255     }
3256     /**
3257      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3258      */
3259     public int getRegionInfoCount() {
3260       return regionInfo_.size();
3261     }
3262     /**
3263      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3264      */
3265     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
3266       return regionInfo_.get(index);
3267     }
3268     /**
3269      * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3270      */
3271     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
3272         int index) {
3273       return regionInfo_.get(index);
3274     }
3275 
3276     private void initFields() {
3277       regionInfo_ = java.util.Collections.emptyList();
3278     }
3279     private byte memoizedIsInitialized = -1;
3280     public final boolean isInitialized() {
3281       byte isInitialized = memoizedIsInitialized;
3282       if (isInitialized != -1) return isInitialized == 1;
3283 
3284       for (int i = 0; i < getRegionInfoCount(); i++) {
3285         if (!getRegionInfo(i).isInitialized()) {
3286           memoizedIsInitialized = 0;
3287           return false;
3288         }
3289       }
3290       memoizedIsInitialized = 1;
3291       return true;
3292     }
3293 
3294     public void writeTo(com.google.protobuf.CodedOutputStream output)
3295                         throws java.io.IOException {
3296       getSerializedSize();
3297       for (int i = 0; i < regionInfo_.size(); i++) {
3298         output.writeMessage(1, regionInfo_.get(i));
3299       }
3300       getUnknownFields().writeTo(output);
3301     }
3302 
3303     private int memoizedSerializedSize = -1;
3304     public int getSerializedSize() {
3305       int size = memoizedSerializedSize;
3306       if (size != -1) return size;
3307 
3308       size = 0;
3309       for (int i = 0; i < regionInfo_.size(); i++) {
3310         size += com.google.protobuf.CodedOutputStream
3311           .computeMessageSize(1, regionInfo_.get(i));
3312       }
3313       size += getUnknownFields().getSerializedSize();
3314       memoizedSerializedSize = size;
3315       return size;
3316     }
3317 
3318     private static final long serialVersionUID = 0L;
3319     @java.lang.Override
3320     protected java.lang.Object writeReplace()
3321         throws java.io.ObjectStreamException {
3322       return super.writeReplace();
3323     }
3324 
3325     @java.lang.Override
3326     public boolean equals(final java.lang.Object obj) {
3327       if (obj == this) {
3328        return true;
3329       }
3330       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) {
3331         return super.equals(obj);
3332       }
3333       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj;
3334 
3335       boolean result = true;
3336       result = result && getRegionInfoList()
3337           .equals(other.getRegionInfoList());
3338       result = result &&
3339           getUnknownFields().equals(other.getUnknownFields());
3340       return result;
3341     }
3342 
3343     private int memoizedHashCode = 0;
3344     @java.lang.Override
3345     public int hashCode() {
3346       if (memoizedHashCode != 0) {
3347         return memoizedHashCode;
3348       }
3349       int hash = 41;
3350       hash = (19 * hash) + getDescriptorForType().hashCode();
3351       if (getRegionInfoCount() > 0) {
3352         hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
3353         hash = (53 * hash) + getRegionInfoList().hashCode();
3354       }
3355       hash = (29 * hash) + getUnknownFields().hashCode();
3356       memoizedHashCode = hash;
3357       return hash;
3358     }
3359 
3360     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
3361         com.google.protobuf.ByteString data)
3362         throws com.google.protobuf.InvalidProtocolBufferException {
3363       return PARSER.parseFrom(data);
3364     }
3365     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
3366         com.google.protobuf.ByteString data,
3367         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3368         throws com.google.protobuf.InvalidProtocolBufferException {
3369       return PARSER.parseFrom(data, extensionRegistry);
3370     }
3371     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data)
3372         throws com.google.protobuf.InvalidProtocolBufferException {
3373       return PARSER.parseFrom(data);
3374     }
3375     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
3376         byte[] data,
3377         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3378         throws com.google.protobuf.InvalidProtocolBufferException {
3379       return PARSER.parseFrom(data, extensionRegistry);
3380     }
3381     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input)
3382         throws java.io.IOException {
3383       return PARSER.parseFrom(input);
3384     }
3385     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
3386         java.io.InputStream input,
3387         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3388         throws java.io.IOException {
3389       return PARSER.parseFrom(input, extensionRegistry);
3390     }
3391     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input)
3392         throws java.io.IOException {
3393       return PARSER.parseDelimitedFrom(input);
3394     }
3395     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(
3396         java.io.InputStream input,
3397         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3398         throws java.io.IOException {
3399       return PARSER.parseDelimitedFrom(input, extensionRegistry);
3400     }
3401     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
3402         com.google.protobuf.CodedInputStream input)
3403         throws java.io.IOException {
3404       return PARSER.parseFrom(input);
3405     }
3406     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
3407         com.google.protobuf.CodedInputStream input,
3408         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3409         throws java.io.IOException {
3410       return PARSER.parseFrom(input, extensionRegistry);
3411     }
3412 
3413     public static Builder newBuilder() { return Builder.create(); }
3414     public Builder newBuilderForType() { return newBuilder(); }
3415     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) {
3416       return newBuilder().mergeFrom(prototype);
3417     }
3418     public Builder toBuilder() { return newBuilder(this); }
3419 
3420     @java.lang.Override
3421     protected Builder newBuilderForType(
3422         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3423       Builder builder = new Builder(parent);
3424       return builder;
3425     }
3426     /**
3427      * Protobuf type {@code hbase.pb.GetOnlineRegionResponse}
3428      */
3429     public static final class Builder extends
3430         com.google.protobuf.GeneratedMessage.Builder<Builder>
3431        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder {
3432       public static final com.google.protobuf.Descriptors.Descriptor
3433           getDescriptor() {
3434         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_descriptor;
3435       }
3436 
3437       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3438           internalGetFieldAccessorTable() {
3439         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable
3440             .ensureFieldAccessorsInitialized(
3441                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class);
3442       }
3443 
3444       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder()
3445       private Builder() {
3446         maybeForceBuilderInitialization();
3447       }
3448 
3449       private Builder(
3450           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3451         super(parent);
3452         maybeForceBuilderInitialization();
3453       }
3454       private void maybeForceBuilderInitialization() {
3455         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3456           getRegionInfoFieldBuilder();
3457         }
3458       }
3459       private static Builder create() {
3460         return new Builder();
3461       }
3462 
3463       public Builder clear() {
3464         super.clear();
3465         if (regionInfoBuilder_ == null) {
3466           regionInfo_ = java.util.Collections.emptyList();
3467           bitField0_ = (bitField0_ & ~0x00000001);
3468         } else {
3469           regionInfoBuilder_.clear();
3470         }
3471         return this;
3472       }
3473 
3474       public Builder clone() {
3475         return create().mergeFrom(buildPartial());
3476       }
3477 
3478       public com.google.protobuf.Descriptors.Descriptor
3479           getDescriptorForType() {
3480         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_descriptor;
3481       }
3482 
3483       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() {
3484         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
3485       }
3486 
3487       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() {
3488         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial();
3489         if (!result.isInitialized()) {
3490           throw newUninitializedMessageException(result);
3491         }
3492         return result;
3493       }
3494 
3495       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() {
3496         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this);
3497         int from_bitField0_ = bitField0_;
3498         if (regionInfoBuilder_ == null) {
3499           if (((bitField0_ & 0x00000001) == 0x00000001)) {
3500             regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
3501             bitField0_ = (bitField0_ & ~0x00000001);
3502           }
3503           result.regionInfo_ = regionInfo_;
3504         } else {
3505           result.regionInfo_ = regionInfoBuilder_.build();
3506         }
3507         onBuilt();
3508         return result;
3509       }
3510 
3511       public Builder mergeFrom(com.google.protobuf.Message other) {
3512         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) {
3513           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other);
3514         } else {
3515           super.mergeFrom(other);
3516           return this;
3517         }
3518       }
3519 
3520       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) {
3521         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this;
3522         if (regionInfoBuilder_ == null) {
3523           if (!other.regionInfo_.isEmpty()) {
3524             if (regionInfo_.isEmpty()) {
3525               regionInfo_ = other.regionInfo_;
3526               bitField0_ = (bitField0_ & ~0x00000001);
3527             } else {
3528               ensureRegionInfoIsMutable();
3529               regionInfo_.addAll(other.regionInfo_);
3530             }
3531             onChanged();
3532           }
3533         } else {
3534           if (!other.regionInfo_.isEmpty()) {
3535             if (regionInfoBuilder_.isEmpty()) {
3536               regionInfoBuilder_.dispose();
3537               regionInfoBuilder_ = null;
3538               regionInfo_ = other.regionInfo_;
3539               bitField0_ = (bitField0_ & ~0x00000001);
3540               regionInfoBuilder_ = 
3541                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
3542                    getRegionInfoFieldBuilder() : null;
3543             } else {
3544               regionInfoBuilder_.addAllMessages(other.regionInfo_);
3545             }
3546           }
3547         }
3548         this.mergeUnknownFields(other.getUnknownFields());
3549         return this;
3550       }
3551 
3552       public final boolean isInitialized() {
3553         for (int i = 0; i < getRegionInfoCount(); i++) {
3554           if (!getRegionInfo(i).isInitialized()) {
3555             
3556             return false;
3557           }
3558         }
3559         return true;
3560       }
3561 
3562       public Builder mergeFrom(
3563           com.google.protobuf.CodedInputStream input,
3564           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3565           throws java.io.IOException {
3566         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parsedMessage = null;
3567         try {
3568           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3569         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3570           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) e.getUnfinishedMessage();
3571           throw e;
3572         } finally {
3573           if (parsedMessage != null) {
3574             mergeFrom(parsedMessage);
3575           }
3576         }
3577         return this;
3578       }
3579       private int bitField0_;
3580 
3581       // repeated .hbase.pb.RegionInfo region_info = 1;
3582       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
3583         java.util.Collections.emptyList();
3584       private void ensureRegionInfoIsMutable() {
3585         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
3586           regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
3587           bitField0_ |= 0x00000001;
3588          }
3589       }
3590 
3591       private com.google.protobuf.RepeatedFieldBuilder<
3592           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
3593 
3594       /**
3595        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3596        */
3597       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
3598         if (regionInfoBuilder_ == null) {
3599           return java.util.Collections.unmodifiableList(regionInfo_);
3600         } else {
3601           return regionInfoBuilder_.getMessageList();
3602         }
3603       }
3604       /**
3605        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3606        */
3607       public int getRegionInfoCount() {
3608         if (regionInfoBuilder_ == null) {
3609           return regionInfo_.size();
3610         } else {
3611           return regionInfoBuilder_.getCount();
3612         }
3613       }
3614       /**
3615        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3616        */
3617       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
3618         if (regionInfoBuilder_ == null) {
3619           return regionInfo_.get(index);
3620         } else {
3621           return regionInfoBuilder_.getMessage(index);
3622         }
3623       }
3624       /**
3625        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3626        */
3627       public Builder setRegionInfo(
3628           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
3629         if (regionInfoBuilder_ == null) {
3630           if (value == null) {
3631             throw new NullPointerException();
3632           }
3633           ensureRegionInfoIsMutable();
3634           regionInfo_.set(index, value);
3635           onChanged();
3636         } else {
3637           regionInfoBuilder_.setMessage(index, value);
3638         }
3639         return this;
3640       }
3641       /**
3642        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3643        */
3644       public Builder setRegionInfo(
3645           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
3646         if (regionInfoBuilder_ == null) {
3647           ensureRegionInfoIsMutable();
3648           regionInfo_.set(index, builderForValue.build());
3649           onChanged();
3650         } else {
3651           regionInfoBuilder_.setMessage(index, builderForValue.build());
3652         }
3653         return this;
3654       }
3655       /**
3656        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3657        */
3658       public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
3659         if (regionInfoBuilder_ == null) {
3660           if (value == null) {
3661             throw new NullPointerException();
3662           }
3663           ensureRegionInfoIsMutable();
3664           regionInfo_.add(value);
3665           onChanged();
3666         } else {
3667           regionInfoBuilder_.addMessage(value);
3668         }
3669         return this;
3670       }
3671       /**
3672        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3673        */
3674       public Builder addRegionInfo(
3675           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
3676         if (regionInfoBuilder_ == null) {
3677           if (value == null) {
3678             throw new NullPointerException();
3679           }
3680           ensureRegionInfoIsMutable();
3681           regionInfo_.add(index, value);
3682           onChanged();
3683         } else {
3684           regionInfoBuilder_.addMessage(index, value);
3685         }
3686         return this;
3687       }
3688       /**
3689        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3690        */
3691       public Builder addRegionInfo(
3692           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
3693         if (regionInfoBuilder_ == null) {
3694           ensureRegionInfoIsMutable();
3695           regionInfo_.add(builderForValue.build());
3696           onChanged();
3697         } else {
3698           regionInfoBuilder_.addMessage(builderForValue.build());
3699         }
3700         return this;
3701       }
3702       /**
3703        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3704        */
3705       public Builder addRegionInfo(
3706           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
3707         if (regionInfoBuilder_ == null) {
3708           ensureRegionInfoIsMutable();
3709           regionInfo_.add(index, builderForValue.build());
3710           onChanged();
3711         } else {
3712           regionInfoBuilder_.addMessage(index, builderForValue.build());
3713         }
3714         return this;
3715       }
3716       /**
3717        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3718        */
3719       public Builder addAllRegionInfo(
3720           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> values) {
3721         if (regionInfoBuilder_ == null) {
3722           ensureRegionInfoIsMutable();
3723           super.addAll(values, regionInfo_);
3724           onChanged();
3725         } else {
3726           regionInfoBuilder_.addAllMessages(values);
3727         }
3728         return this;
3729       }
3730       /**
3731        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3732        */
3733       public Builder clearRegionInfo() {
3734         if (regionInfoBuilder_ == null) {
3735           regionInfo_ = java.util.Collections.emptyList();
3736           bitField0_ = (bitField0_ & ~0x00000001);
3737           onChanged();
3738         } else {
3739           regionInfoBuilder_.clear();
3740         }
3741         return this;
3742       }
3743       /**
3744        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3745        */
3746       public Builder removeRegionInfo(int index) {
3747         if (regionInfoBuilder_ == null) {
3748           ensureRegionInfoIsMutable();
3749           regionInfo_.remove(index);
3750           onChanged();
3751         } else {
3752           regionInfoBuilder_.remove(index);
3753         }
3754         return this;
3755       }
3756       /**
3757        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3758        */
3759       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
3760           int index) {
3761         return getRegionInfoFieldBuilder().getBuilder(index);
3762       }
3763       /**
3764        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3765        */
3766       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
3767           int index) {
3768         if (regionInfoBuilder_ == null) {
3769           return regionInfo_.get(index);  } else {
3770           return regionInfoBuilder_.getMessageOrBuilder(index);
3771         }
3772       }
3773       /**
3774        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3775        */
3776       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
3777            getRegionInfoOrBuilderList() {
3778         if (regionInfoBuilder_ != null) {
3779           return regionInfoBuilder_.getMessageOrBuilderList();
3780         } else {
3781           return java.util.Collections.unmodifiableList(regionInfo_);
3782         }
3783       }
3784       /**
3785        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3786        */
3787       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
3788         return getRegionInfoFieldBuilder().addBuilder(
3789             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
3790       }
3791       /**
3792        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3793        */
3794       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
3795           int index) {
3796         return getRegionInfoFieldBuilder().addBuilder(
3797             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
3798       }
3799       /**
3800        * <code>repeated .hbase.pb.RegionInfo region_info = 1;</code>
3801        */
3802       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder> 
3803            getRegionInfoBuilderList() {
3804         return getRegionInfoFieldBuilder().getBuilderList();
3805       }
      // Lazily creates the RepeatedFieldBuilder on first use. Ownership of the
      // element list transfers to the builder, so the plain-list reference is
      // nulled out afterwards; getParentForChildren()/isClean() wire change
      // notifications back into this Builder.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
          getRegionInfoFieldBuilder() {
        if (regionInfoBuilder_ == null) {
          regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
                  regionInfo_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          // The builder now owns the elements; drop the direct list reference.
          regionInfo_ = null;
        }
        return regionInfoBuilder_;
      }
3820 
3821       // @@protoc_insertion_point(builder_scope:hbase.pb.GetOnlineRegionResponse)
3822     }
3823 
3824     static {
3825       defaultInstance = new GetOnlineRegionResponse(true);
3826       defaultInstance.initFields();
3827     }
3828 
3829     // @@protoc_insertion_point(class_scope:hbase.pb.GetOnlineRegionResponse)
3830   }
3831 
  // Read-only accessor contract shared by OpenRegionRequest and its Builder:
  // a repeated RegionOpenInfo (field 1) plus two optional uint64 scalars
  // (serverStartCode = 2, master_system_time = 5).
  public interface OpenRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> 
        getOpenInfoList();
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index);
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    int getOpenInfoCount();
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> 
        getOpenInfoOrBuilderList();
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
        int index);

    // optional uint64 serverStartCode = 2;
    /**
     * <code>optional uint64 serverStartCode = 2;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    boolean hasServerStartCode();
    /**
     * <code>optional uint64 serverStartCode = 2;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    long getServerStartCode();

    // optional uint64 master_system_time = 5;
    /**
     * <code>optional uint64 master_system_time = 5;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     */
    boolean hasMasterSystemTime();
    /**
     * <code>optional uint64 master_system_time = 5;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     */
    long getMasterSystemTime();
  }
3896   /**
3897    * Protobuf type {@code hbase.pb.OpenRegionRequest}
3898    */
3899   public static final class OpenRegionRequest extends
3900       com.google.protobuf.GeneratedMessage
3901       implements OpenRegionRequestOrBuilder {
    // Use OpenRegionRequest.newBuilder() to construct.
    // Copies the builder's unknown-field set into the immutable message.
    private OpenRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Construction path for the shared default instance: installs an empty
    // unknown-field set and nothing else.
    private OpenRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3908 
    private static final OpenRegionRequest defaultInstance;
    /** Returns the shared immutable default instance of this message type. */
    public static OpenRegionRequest getDefaultInstance() {
      return defaultInstance;
    }
3913 
    /** Instance-level accessor for the same shared default instance. */
    public OpenRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
3917 
    // Fields present on the wire but not known to this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Reads tag/value pairs until EOF (tag 0);
    // unrecognized tags are preserved in unknownFields. Tag = (field# << 3) | wireType.
    private OpenRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // NOTE: the default label preceding the numbered cases is legal Java
            // and is the layout the protobuf compiler emits.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (open_info), length-delimited: lazily allocate the list
              // once, tracked by mutable_bitField0_ bit 0.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                openInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>();
                mutable_bitField0_ |= 0x00000001;
              }
              openInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.PARSER, extensionRegistry));
              break;
            }
            case 16: {
              // Field 2 (serverStartCode), varint.
              bitField0_ |= 0x00000001;
              serverStartCode_ = input.readUInt64();
              break;
            }
            case 40: {
              // Field 5 (master_system_time), varint.
              bitField0_ |= 0x00000002;
              masterSystemTime_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field even on error so the partial message is safe.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          openInfo_ = java.util.Collections.unmodifiableList(openInfo_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for {@code hbase.pb.OpenRegionRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_descriptor;
    }
3984 
    // Binds the descriptor's fields to this class's reflection accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class);
    }
3991 
    // Stateless parser singleton; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<OpenRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<OpenRegionRequest>() {
      public OpenRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new OpenRegionRequest(input, extensionRegistry);
      }
    };
4001 
    @java.lang.Override
    public com.google.protobuf.Parser<OpenRegionRequest> getParserForType() {
      // Every instance shares the single PARSER defined above.
      return PARSER;
    }
4006 
    // Read-only accessor contract shared by RegionOpenInfo and its Builder:
    // required RegionInfo (field 1), optional uint32 version_of_offline_node (2),
    // repeated ServerName favored_nodes (3), optional bool
    // openForDistributedLogReplay (4).
    public interface RegionOpenInfoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required .hbase.pb.RegionInfo region = 1;
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      boolean hasRegion();
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion();
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder();

      // optional uint32 version_of_offline_node = 2;
      /**
       * <code>optional uint32 version_of_offline_node = 2;</code>
       */
      boolean hasVersionOfOfflineNode();
      /**
       * <code>optional uint32 version_of_offline_node = 2;</code>
       */
      int getVersionOfOfflineNode();

      // repeated .hbase.pb.ServerName favored_nodes = 3;
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> 
          getFavoredNodesList();
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index);
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      int getFavoredNodesCount();
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getFavoredNodesOrBuilderList();
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder(
          int index);

      // optional bool openForDistributedLogReplay = 4;
      /**
       * <code>optional bool openForDistributedLogReplay = 4;</code>
       *
       * <pre>
       * open region for distributedLogReplay
       * </pre>
       */
      boolean hasOpenForDistributedLogReplay();
      /**
       * <code>optional bool openForDistributedLogReplay = 4;</code>
       *
       * <pre>
       * open region for distributedLogReplay
       * </pre>
       */
      boolean getOpenForDistributedLogReplay();
    }
4077     /**
4078      * Protobuf type {@code hbase.pb.OpenRegionRequest.RegionOpenInfo}
4079      */
4080     public static final class RegionOpenInfo extends
4081         com.google.protobuf.GeneratedMessage
4082         implements RegionOpenInfoOrBuilder {
      // Use RegionOpenInfo.newBuilder() to construct.
      // Copies the builder's unknown-field set into the immutable message.
      private RegionOpenInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // Construction path for the shared default instance: installs an empty
      // unknown-field set and nothing else.
      private RegionOpenInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4089 
      private static final RegionOpenInfo defaultInstance;
      /** Returns the shared immutable default instance of this message type. */
      public static RegionOpenInfo getDefaultInstance() {
        return defaultInstance;
      }
4094 
      /** Instance-level accessor for the same shared default instance. */
      public RegionOpenInfo getDefaultInstanceForType() {
        return defaultInstance;
      }
4098 
      // Fields present on the wire but not known to this schema version.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor. Reads tag/value pairs until EOF (tag 0);
      // unrecognized tags are preserved in unknownFields. Tag = (field# << 3) | wireType.
      private RegionOpenInfo(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              // NOTE: the default label preceding the numbered cases is legal
              // Java and is the layout the protobuf compiler emits.
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // Field 1 (region), length-delimited: if a value was already
                // read, merge the new one into it (last-wins field semantics
                // via message merging).
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
                if (((bitField0_ & 0x00000001) == 0x00000001)) {
                  subBuilder = region_.toBuilder();
                }
                region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(region_);
                  region_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000001;
                break;
              }
              case 16: {
                // Field 2 (version_of_offline_node), varint.
                bitField0_ |= 0x00000002;
                versionOfOfflineNode_ = input.readUInt32();
                break;
              }
              case 26: {
                // Field 3 (favored_nodes), length-delimited: lazily allocate the
                // list once, tracked by mutable_bitField0_ bit 2.
                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                  favoredNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>();
                  mutable_bitField0_ |= 0x00000004;
                }
                favoredNodes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry));
                break;
              }
              case 32: {
                // Field 4 (openForDistributedLogReplay), varint-encoded bool.
                bitField0_ |= 0x00000004;
                openForDistributedLogReplay_ = input.readBool();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Freeze the repeated field even on error so the partial message is safe.
          if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
            favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the protobuf descriptor for {@code hbase.pb.OpenRegionRequest.RegionOpenInfo}. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor;
      }
4178 
      // Binds the descriptor's fields to this class's reflection accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class);
      }
4185 
      // Stateless parser singleton; delegates to the parsing constructor above.
      public static com.google.protobuf.Parser<RegionOpenInfo> PARSER =
          new com.google.protobuf.AbstractParser<RegionOpenInfo>() {
        public RegionOpenInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new RegionOpenInfo(input, extensionRegistry);
        }
      };
4195 
      @java.lang.Override
      public com.google.protobuf.Parser<RegionOpenInfo> getParserForType() {
        // Every instance shares the single PARSER defined above.
        return PARSER;
      }
4200 
      // Has-bits for the optional/required scalar and message fields:
      // bit 0 = region, bit 1 = version_of_offline_node,
      // bit 2 = openForDistributedLogReplay (see the has* methods below).
      private int bitField0_;
      // required .hbase.pb.RegionInfo region = 1;
      public static final int REGION_FIELD_NUMBER = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_;
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
        return region_;
      }
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       *
       * <p>On an immutable message the stored message doubles as its own
       * OrBuilder view.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
        return region_;
      }
4223 
      // optional uint32 version_of_offline_node = 2;
      public static final int VERSION_OF_OFFLINE_NODE_FIELD_NUMBER = 2;
      private int versionOfOfflineNode_;
      /**
       * <code>optional uint32 version_of_offline_node = 2;</code>
       */
      public boolean hasVersionOfOfflineNode() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint32 version_of_offline_node = 2;</code>
       *
       * <p>Returns 0 when the field is unset (see initFields()).
       */
      public int getVersionOfOfflineNode() {
        return versionOfOfflineNode_;
      }
4239 
      // repeated .hbase.pb.ServerName favored_nodes = 3;
      // Repeated fields have no has-bit; an unset field is just the empty list,
      // and the stored list (frozen by the parsing constructor) also serves as
      // the OrBuilder view.
      public static final int FAVORED_NODES_FIELD_NUMBER = 3;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_;
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() {
        return favoredNodes_;
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getFavoredNodesOrBuilderList() {
        return favoredNodes_;
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      public int getFavoredNodesCount() {
        return favoredNodes_.size();
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index) {
        return favoredNodes_.get(index);
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder(
          int index) {
        return favoredNodes_.get(index);
      }
4275 
      // optional bool openForDistributedLogReplay = 4;
      public static final int OPENFORDISTRIBUTEDLOGREPLAY_FIELD_NUMBER = 4;
      private boolean openForDistributedLogReplay_;
      /**
       * <code>optional bool openForDistributedLogReplay = 4;</code>
       *
       * <pre>
       * open region for distributedLogReplay
       * </pre>
       */
      public boolean hasOpenForDistributedLogReplay() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool openForDistributedLogReplay = 4;</code>
       *
       * <pre>
       * open region for distributedLogReplay
       * </pre>
       *
       * <p>Returns false when the field is unset (see initFields()).
       */
      public boolean getOpenForDistributedLogReplay() {
        return openForDistributedLogReplay_;
      }
4299 
      // Resets every field to its proto2 default; invoked before parsing and
      // when building the default instance.
      private void initFields() {
        region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
        versionOfOfflineNode_ = 0;
        favoredNodes_ = java.util.Collections.emptyList();
        openForDistributedLogReplay_ = false;
      }
      // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // The required 'region' field must be present and itself initialized.
        if (!hasRegion()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (!getRegion().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
        // Every favored_nodes element must be initialized as well.
        for (int i = 0; i < getFavoredNodesCount(); i++) {
          if (!getFavoredNodes(i).isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
4328 
      // Serializes set fields in field-number order; getSerializedSize() is
      // called first so nested message sizes are memoized before writing.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeMessage(1, region_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeUInt32(2, versionOfOfflineNode_);
        }
        for (int i = 0; i < favoredNodes_.size(); i++) {
          output.writeMessage(3, favoredNodes_.get(i));
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          output.writeBool(4, openForDistributedLogReplay_);
        }
        getUnknownFields().writeTo(output);
      }
4346 
      // Memoized wire size; -1 means not yet computed. Safe to cache because
      // the message is immutable once constructed.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, region_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeUInt32Size(2, versionOfOfflineNode_);
        }
        for (int i = 0; i < favoredNodes_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(3, favoredNodes_.get(i));
        }
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBoolSize(4, openForDistributedLogReplay_);
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
4373 
      private static final long serialVersionUID = 0L;
      // Java-serialization hook; defers to GeneratedMessage's replacement object.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
4380 
      // Field-by-field equality: presence bits must match, then values; the
      // unknown-field sets must match as well.
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
         return true;
        }
        if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)) {
          return super.equals(obj);
        }
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) obj;

        boolean result = true;
        result = result && (hasRegion() == other.hasRegion());
        if (hasRegion()) {
          result = result && getRegion()
              .equals(other.getRegion());
        }
        result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode());
        if (hasVersionOfOfflineNode()) {
          result = result && (getVersionOfOfflineNode()
              == other.getVersionOfOfflineNode());
        }
        result = result && getFavoredNodesList()
            .equals(other.getFavoredNodesList());
        result = result && (hasOpenForDistributedLogReplay() == other.hasOpenForDistributedLogReplay());
        if (hasOpenForDistributedLogReplay()) {
          result = result && (getOpenForDistributedLogReplay()
              == other.getOpenForDistributedLogReplay());
        }
        result = result &&
            getUnknownFields().equals(other.getUnknownFields());
        return result;
      }
4413 
// Cached hash; 0 doubles as the "not yet computed" sentinel, so a message that
// genuinely hashes to 0 would be recomputed on every call (harmless).
4414       private int memoizedHashCode = 0;
// Consistent with equals(): only fields whose presence check passes are mixed
// in, each tagged by its field number so distinct fields with equal values
// hash differently; unknown fields participate as well.
4415       @java.lang.Override
4416       public int hashCode() {
4417         if (memoizedHashCode != 0) {
4418           return memoizedHashCode;
4419         }
4420         int hash = 41;
4421         hash = (19 * hash) + getDescriptorForType().hashCode();
4422         if (hasRegion()) {
4423           hash = (37 * hash) + REGION_FIELD_NUMBER;
4424           hash = (53 * hash) + getRegion().hashCode();
4425         }
4426         if (hasVersionOfOfflineNode()) {
4427           hash = (37 * hash) + VERSION_OF_OFFLINE_NODE_FIELD_NUMBER;
4428           hash = (53 * hash) + getVersionOfOfflineNode();
4429         }
4430         if (getFavoredNodesCount() > 0) {
4431           hash = (37 * hash) + FAVORED_NODES_FIELD_NUMBER;
4432           hash = (53 * hash) + getFavoredNodesList().hashCode();
4433         }
4434         if (hasOpenForDistributedLogReplay()) {
4435           hash = (37 * hash) + OPENFORDISTRIBUTEDLOGREPLAY_FIELD_NUMBER;
4436           hash = (53 * hash) + hashBoolean(getOpenForDistributedLogReplay());
4437         }
4438         hash = (29 * hash) + getUnknownFields().hashCode();
4439         memoizedHashCode = hash;
4440         return hash;
4441       }
4442 
// Static parsing entry points. All overloads delegate to the shared PARSER;
// the ByteString/byte[] variants throw InvalidProtocolBufferException on
// malformed input, while the stream variants surface IOException. The
// *WithRegistry overloads resolve extensions during parsing, and the
// parseDelimitedFrom variants read a length-prefixed message (see the
// protobuf Java generated-code guide).
4443       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
4444           com.google.protobuf.ByteString data)
4445           throws com.google.protobuf.InvalidProtocolBufferException {
4446         return PARSER.parseFrom(data);
4447       }
4448       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
4449           com.google.protobuf.ByteString data,
4450           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4451           throws com.google.protobuf.InvalidProtocolBufferException {
4452         return PARSER.parseFrom(data, extensionRegistry);
4453       }
4454       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(byte[] data)
4455           throws com.google.protobuf.InvalidProtocolBufferException {
4456         return PARSER.parseFrom(data);
4457       }
4458       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
4459           byte[] data,
4460           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4461           throws com.google.protobuf.InvalidProtocolBufferException {
4462         return PARSER.parseFrom(data, extensionRegistry);
4463       }
4464       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input)
4465           throws java.io.IOException {
4466         return PARSER.parseFrom(input);
4467       }
4468       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
4469           java.io.InputStream input,
4470           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4471           throws java.io.IOException {
4472         return PARSER.parseFrom(input, extensionRegistry);
4473       }
4474       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input)
4475           throws java.io.IOException {
4476         return PARSER.parseDelimitedFrom(input);
4477       }
4478       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(
4479           java.io.InputStream input,
4480           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4481           throws java.io.IOException {
4482         return PARSER.parseDelimitedFrom(input, extensionRegistry);
4483       }
4484       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
4485           com.google.protobuf.CodedInputStream input)
4486           throws java.io.IOException {
4487         return PARSER.parseFrom(input);
4488       }
4489       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
4490           com.google.protobuf.CodedInputStream input,
4491           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4492           throws java.io.IOException {
4493         return PARSER.parseFrom(input, extensionRegistry);
4494       }
4495 
// Builder factories: newBuilder(prototype) and toBuilder() seed a fresh
// builder with an existing message's fields via mergeFrom.
4496       public static Builder newBuilder() { return Builder.create(); }
4497       public Builder newBuilderForType() { return newBuilder(); }
4498       public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) {
4499         return newBuilder().mergeFrom(prototype);
4500       }
4501       public Builder toBuilder() { return newBuilder(this); }
4502 
// Internal hook: constructs a Builder bound to the given parent (protobuf's
// nested-builder plumbing; the parent presumably receives change
// notifications — standard GeneratedMessage behavior).
4503       @java.lang.Override
4504       protected Builder newBuilderForType(
4505           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4506         Builder builder = new Builder(parent);
4507         return builder;
4508       }
4509       /**
4510        * Protobuf type {@code hbase.pb.OpenRegionRequest.RegionOpenInfo}
4511        */
4512       public static final class Builder extends
4513           com.google.protobuf.GeneratedMessage.Builder<Builder>
4514          implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder {
// Reflection support: exposes the compiled descriptor for
// hbase.pb.OpenRegionRequest.RegionOpenInfo and the accessor table that maps
// descriptor fields onto the generated message/builder classes.
4515         public static final com.google.protobuf.Descriptors.Descriptor
4516             getDescriptor() {
4517           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor;
4518         }
4519 
4520         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4521             internalGetFieldAccessorTable() {
4522           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable
4523               .ensureFieldAccessorsInitialized(
4524                   org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class);
4525         }
4526 
4527         // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder()
4528         private Builder() {
4529           maybeForceBuilderInitialization();
4530         }
4531 
// Parent-bound variant used by newBuilderForType(parent) for nested-builder
// change propagation.
4532         private Builder(
4533             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4534           super(parent);
4535           maybeForceBuilderInitialization();
4536         }
// When the runtime flag alwaysUseFieldBuilders is on, eagerly create the
// nested field builders for the message-typed fields (region, favored_nodes)
// instead of lazily on first access.
4537         private void maybeForceBuilderInitialization() {
4538           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4539             getRegionFieldBuilder();
4540             getFavoredNodesFieldBuilder();
4541           }
4542         }
4543         private static Builder create() {
4544           return new Builder();
4545         }
4546 
// Resets every field to its default and clears its presence bit.
// Builder-local bit layout: 0x01 region, 0x02 version_of_offline_node,
// 0x04 favoredNodes-list-is-privately-owned, 0x08 open_for_distributed_log_replay.
4547         public Builder clear() {
4548           super.clear();
4549           if (regionBuilder_ == null) {
4550             region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
4551           } else {
4552             regionBuilder_.clear();
4553           }
4554           bitField0_ = (bitField0_ & ~0x00000001);
4555           versionOfOfflineNode_ = 0;
4556           bitField0_ = (bitField0_ & ~0x00000002);
4557           if (favoredNodesBuilder_ == null) {
4558             favoredNodes_ = java.util.Collections.emptyList();
4559             bitField0_ = (bitField0_ & ~0x00000004);
4560           } else {
4561             favoredNodesBuilder_.clear();
4562           }
4563           openForDistributedLogReplay_ = false;
4564           bitField0_ = (bitField0_ & ~0x00000008);
4565           return this;
4566         }
4567 
// Copies the builder by snapshotting its current (possibly uninitialized)
// state with buildPartial() and merging that into a fresh builder.
4568         public Builder clone() {
4569           return create().mergeFrom(buildPartial());
4570         }
4571 
// Reflection/identity accessors: the message descriptor and the shared
// default (all-fields-unset) instance for this message type.
4572         public com.google.protobuf.Descriptors.Descriptor
4573             getDescriptorForType() {
4574           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor;
4575         }
4576 
4577         public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() {
4578           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance();
4579         }
4580 
// Builds the message, enforcing required-field initialization: throws an
// UninitializedMessageException (via newUninitializedMessageException) when
// isInitialized() fails — e.g. the required region field is unset.
4581         public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo build() {
4582           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial();
4583           if (!result.isInitialized()) {
4584             throw newUninitializedMessageException(result);
4585           }
4586           return result;
4587         }
4588 
// Builds without checking required fields. Copies each field out of the
// builder (or out of its nested field builder, when one exists) and compacts
// the presence bits: the repeated favored_nodes field has no presence bit in
// the built message, so the builder's 0x08 bit (open_for_distributed_log_replay)
// maps to message bit 0x04.
4589         public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildPartial() {
4590           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(this);
4591           int from_bitField0_ = bitField0_;
4592           int to_bitField0_ = 0;
4593           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4594             to_bitField0_ |= 0x00000001;
4595           }
4596           if (regionBuilder_ == null) {
4597             result.region_ = region_;
4598           } else {
4599             result.region_ = regionBuilder_.build();
4600           }
4601           if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4602             to_bitField0_ |= 0x00000002;
4603           }
4604           result.versionOfOfflineNode_ = versionOfOfflineNode_;
4605           if (favoredNodesBuilder_ == null) {
// Freeze the list and hand it to the message; clearing bit 0x04 forces the
// builder to make a private copy before any further mutation.
4606             if (((bitField0_ & 0x00000004) == 0x00000004)) {
4607               favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_);
4608               bitField0_ = (bitField0_ & ~0x00000004);
4609             }
4610             result.favoredNodes_ = favoredNodes_;
4611           } else {
4612             result.favoredNodes_ = favoredNodesBuilder_.build();
4613           }
4614           if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
4615             to_bitField0_ |= 0x00000004;
4616           }
4617           result.openForDistributedLogReplay_ = openForDistributedLogReplay_;
4618           result.bitField0_ = to_bitField0_;
4619           onBuilt();
4620           return result;
4621         }
4622 
// Generic merge dispatch: routes to the strongly-typed overload when the
// other message is a RegionOpenInfo, otherwise defers to the reflective
// superclass merge.
4623         public Builder mergeFrom(com.google.protobuf.Message other) {
4624           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) {
4625             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other);
4626           } else {
4627             super.mergeFrom(other);
4628             return this;
4629           }
4630         }
4631 
// Typed merge: copies each set field of `other` into this builder; repeated
// favored_nodes entries are appended, and unknown fields are merged last.
// Merging the default instance is a no-op.
4632         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other) {
4633           if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()) return this;
4634           if (other.hasRegion()) {
4635             mergeRegion(other.getRegion());
4636           }
4637           if (other.hasVersionOfOfflineNode()) {
4638             setVersionOfOfflineNode(other.getVersionOfOfflineNode());
4639           }
4640           if (favoredNodesBuilder_ == null) {
4641             if (!other.favoredNodes_.isEmpty()) {
// Optimization: when our list is empty, share other's (immutable) list
// directly; clearing bit 0x04 guarantees a private copy before mutation.
4642               if (favoredNodes_.isEmpty()) {
4643                 favoredNodes_ = other.favoredNodes_;
4644                 bitField0_ = (bitField0_ & ~0x00000004);
4645               } else {
4646                 ensureFavoredNodesIsMutable();
4647                 favoredNodes_.addAll(other.favoredNodes_);
4648               }
4649               onChanged();
4650             }
4651           } else {
4652             if (!other.favoredNodes_.isEmpty()) {
// Same sharing optimization in builder mode: drop the empty field builder,
// adopt other's list, and re-create the builder only in "always" mode.
4653               if (favoredNodesBuilder_.isEmpty()) {
4654                 favoredNodesBuilder_.dispose();
4655                 favoredNodesBuilder_ = null;
4656                 favoredNodes_ = other.favoredNodes_;
4657                 bitField0_ = (bitField0_ & ~0x00000004);
4658                 favoredNodesBuilder_ = 
4659                   com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
4660                      getFavoredNodesFieldBuilder() : null;
4661               } else {
4662                 favoredNodesBuilder_.addAllMessages(other.favoredNodes_);
4663               }
4664             }
4665           }
4666           if (other.hasOpenForDistributedLogReplay()) {
4667             setOpenForDistributedLogReplay(other.getOpenForDistributedLogReplay());
4668           }
4669           this.mergeUnknownFields(other.getUnknownFields());
4670           return this;
4671         }
4672 
// True only when the required region field is set and every message-typed
// field (region and each favored_nodes entry) is itself fully initialized.
4673         public final boolean isInitialized() {
4674           if (!hasRegion()) {
4675             
4676             return false;
4677           }
4678           if (!getRegion().isInitialized()) {
4679             
4680             return false;
4681           }
4682           for (int i = 0; i < getFavoredNodesCount(); i++) {
4683             if (!getFavoredNodes(i).isInitialized()) {
4684               
4685               return false;
4686             }
4687           }
4688           return true;
4689         }
4690 
// Parses a RegionOpenInfo from the stream and merges it into this builder.
// On InvalidProtocolBufferException the partially-parsed message is still
// merged in the finally block (so already-read fields are kept) before the
// exception is rethrown.
4691         public Builder mergeFrom(
4692             com.google.protobuf.CodedInputStream input,
4693             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4694             throws java.io.IOException {
4695           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parsedMessage = null;
4696           try {
4697             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4698           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4699             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) e.getUnfinishedMessage();
4700             throw e;
4701           } finally {
4702             if (parsedMessage != null) {
4703               mergeFrom(parsedMessage);
4704             }
4705           }
4706           return this;
4707         }
// Presence/state bits for the builder's fields (see clear() for the layout).
4708         private int bitField0_;
4709 
4710         // required .hbase.pb.RegionInfo region = 1;
// The region field lives in exactly one of two places: the plain region_
// value, or regionBuilder_ once a nested builder has been requested; every
// accessor below branches on which representation is active.
4711         private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
4712         private com.google.protobuf.SingleFieldBuilder<
4713             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_;
4714         /**
4715          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4716          */
4717         public boolean hasRegion() {
4718           return ((bitField0_ & 0x00000001) == 0x00000001);
4719         }
4720         /**
4721          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4722          */
4723         public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
4724           if (regionBuilder_ == null) {
4725             return region_;
4726           } else {
4727             return regionBuilder_.getMessage();
4728           }
4729         }
4730         /**
4731          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4732          */
4733         public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
4734           if (regionBuilder_ == null) {
4735             if (value == null) {
4736               throw new NullPointerException();
4737             }
4738             region_ = value;
4739             onChanged();
4740           } else {
4741             regionBuilder_.setMessage(value);
4742           }
4743           bitField0_ |= 0x00000001;
4744           return this;
4745         }
4746         /**
4747          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4748          */
4749         public Builder setRegion(
4750             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
4751           if (regionBuilder_ == null) {
4752             region_ = builderForValue.build();
4753             onChanged();
4754           } else {
4755             regionBuilder_.setMessage(builderForValue.build());
4756           }
4757           bitField0_ |= 0x00000001;
4758           return this;
4759         }
4760         /**
4761          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4762          */
// Merge semantics: if region is already set (and not the default instance),
// combine field-by-field via RegionInfo's builder; otherwise adopt `value`.
4763         public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
4764           if (regionBuilder_ == null) {
4765             if (((bitField0_ & 0x00000001) == 0x00000001) &&
4766                 region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
4767               region_ =
4768                 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial();
4769             } else {
4770               region_ = value;
4771             }
4772             onChanged();
4773           } else {
4774             regionBuilder_.mergeFrom(value);
4775           }
4776           bitField0_ |= 0x00000001;
4777           return this;
4778         }
4779         /**
4780          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4781          */
4782         public Builder clearRegion() {
4783           if (regionBuilder_ == null) {
4784             region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
4785             onChanged();
4786           } else {
4787             regionBuilder_.clear();
4788           }
4789           bitField0_ = (bitField0_ & ~0x00000001);
4790           return this;
4791         }
4792         /**
4793          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4794          */
// Returns a mutable nested builder for in-place edits; marks the field
// present, since callers typically mutate the returned builder directly.
4795         public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() {
4796           bitField0_ |= 0x00000001;
4797           onChanged();
4798           return getRegionFieldBuilder().getBuilder();
4799         }
4800         /**
4801          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4802          */
4803         public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
4804           if (regionBuilder_ != null) {
4805             return regionBuilder_.getMessageOrBuilder();
4806           } else {
4807             return region_;
4808           }
4809         }
4810         /**
4811          * <code>required .hbase.pb.RegionInfo region = 1;</code>
4812          */
// Lazily switches the field into builder mode: wraps the current region_
// value in a SingleFieldBuilder and nulls the plain reference, which is the
// hand-off between the two representations noted above.
4813         private com.google.protobuf.SingleFieldBuilder<
4814             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
4815             getRegionFieldBuilder() {
4816           if (regionBuilder_ == null) {
4817             regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
4818                 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
4819                     region_,
4820                     getParentForChildren(),
4821                     isClean());
4822             region_ = null;
4823           }
4824           return regionBuilder_;
4825         }
4826 
4827         // optional uint32 version_of_offline_node = 2;
// Scalar optional field: presence tracked by bit 0x00000002 in bitField0_.
4828         private int versionOfOfflineNode_ ;
4829         /**
4830          * <code>optional uint32 version_of_offline_node = 2;</code>
4831          */
4832         public boolean hasVersionOfOfflineNode() {
4833           return ((bitField0_ & 0x00000002) == 0x00000002);
4834         }
4835         /**
4836          * <code>optional uint32 version_of_offline_node = 2;</code>
4837          */
4838         public int getVersionOfOfflineNode() {
4839           return versionOfOfflineNode_;
4840         }
4841         /**
4842          * <code>optional uint32 version_of_offline_node = 2;</code>
4843          */
4844         public Builder setVersionOfOfflineNode(int value) {
4845           bitField0_ |= 0x00000002;
4846           versionOfOfflineNode_ = value;
4847           onChanged();
4848           return this;
4849         }
4850         /**
4851          * <code>optional uint32 version_of_offline_node = 2;</code>
4852          */
4853         public Builder clearVersionOfOfflineNode() {
4854           bitField0_ = (bitField0_ & ~0x00000002);
4855           versionOfOfflineNode_ = 0;
4856           onChanged();
4857           return this;
4858         }
4859 
4860         // repeated .hbase.pb.ServerName favored_nodes = 3;
4861         private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_ =
4862           java.util.Collections.emptyList();
4863         private void ensureFavoredNodesIsMutable() {
4864           if (!((bitField0_ & 0x00000004) == 0x00000004)) {
4865             favoredNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(favoredNodes_);
4866             bitField0_ |= 0x00000004;
4867            }
4868         }
4869 
4870         private com.google.protobuf.RepeatedFieldBuilder<
4871             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodesBuilder_;
4872 
4873         /**
4874          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4875          */
4876         public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() {
4877           if (favoredNodesBuilder_ == null) {
4878             return java.util.Collections.unmodifiableList(favoredNodes_);
4879           } else {
4880             return favoredNodesBuilder_.getMessageList();
4881           }
4882         }
4883         /**
4884          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4885          */
4886         public int getFavoredNodesCount() {
4887           if (favoredNodesBuilder_ == null) {
4888             return favoredNodes_.size();
4889           } else {
4890             return favoredNodesBuilder_.getCount();
4891           }
4892         }
4893         /**
4894          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4895          */
4896         public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index) {
4897           if (favoredNodesBuilder_ == null) {
4898             return favoredNodes_.get(index);
4899           } else {
4900             return favoredNodesBuilder_.getMessage(index);
4901           }
4902         }
4903         /**
4904          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4905          */
4906         public Builder setFavoredNodes(
4907             int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4908           if (favoredNodesBuilder_ == null) {
4909             if (value == null) {
4910               throw new NullPointerException();
4911             }
4912             ensureFavoredNodesIsMutable();
4913             favoredNodes_.set(index, value);
4914             onChanged();
4915           } else {
4916             favoredNodesBuilder_.setMessage(index, value);
4917           }
4918           return this;
4919         }
4920         /**
4921          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4922          */
4923         public Builder setFavoredNodes(
4924             int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
4925           if (favoredNodesBuilder_ == null) {
4926             ensureFavoredNodesIsMutable();
4927             favoredNodes_.set(index, builderForValue.build());
4928             onChanged();
4929           } else {
4930             favoredNodesBuilder_.setMessage(index, builderForValue.build());
4931           }
4932           return this;
4933         }
4934         /**
4935          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4936          */
4937         public Builder addFavoredNodes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4938           if (favoredNodesBuilder_ == null) {
4939             if (value == null) {
4940               throw new NullPointerException();
4941             }
4942             ensureFavoredNodesIsMutable();
4943             favoredNodes_.add(value);
4944             onChanged();
4945           } else {
4946             favoredNodesBuilder_.addMessage(value);
4947           }
4948           return this;
4949         }
4950         /**
4951          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4952          */
4953         public Builder addFavoredNodes(
4954             int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4955           if (favoredNodesBuilder_ == null) {
4956             if (value == null) {
4957               throw new NullPointerException();
4958             }
4959             ensureFavoredNodesIsMutable();
4960             favoredNodes_.add(index, value);
4961             onChanged();
4962           } else {
4963             favoredNodesBuilder_.addMessage(index, value);
4964           }
4965           return this;
4966         }
4967         /**
4968          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4969          */
4970         public Builder addFavoredNodes(
4971             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
4972           if (favoredNodesBuilder_ == null) {
4973             ensureFavoredNodesIsMutable();
4974             favoredNodes_.add(builderForValue.build());
4975             onChanged();
4976           } else {
4977             favoredNodesBuilder_.addMessage(builderForValue.build());
4978           }
4979           return this;
4980         }
4981         /**
4982          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4983          */
4984         public Builder addFavoredNodes(
4985             int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
4986           if (favoredNodesBuilder_ == null) {
4987             ensureFavoredNodesIsMutable();
4988             favoredNodes_.add(index, builderForValue.build());
4989             onChanged();
4990           } else {
4991             favoredNodesBuilder_.addMessage(index, builderForValue.build());
4992           }
4993           return this;
4994         }
4995         /**
4996          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
4997          */
4998         public Builder addAllFavoredNodes(
4999             java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) {
5000           if (favoredNodesBuilder_ == null) {
5001             ensureFavoredNodesIsMutable();
5002             super.addAll(values, favoredNodes_);
5003             onChanged();
5004           } else {
5005             favoredNodesBuilder_.addAllMessages(values);
5006           }
5007           return this;
5008         }
5009         /**
5010          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
5011          */
5012         public Builder clearFavoredNodes() {
5013           if (favoredNodesBuilder_ == null) {
5014             favoredNodes_ = java.util.Collections.emptyList();
5015             bitField0_ = (bitField0_ & ~0x00000004);
5016             onChanged();
5017           } else {
5018             favoredNodesBuilder_.clear();
5019           }
5020           return this;
5021         }
5022         /**
5023          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
5024          */
5025         public Builder removeFavoredNodes(int index) {
5026           if (favoredNodesBuilder_ == null) {
5027             ensureFavoredNodesIsMutable();
5028             favoredNodes_.remove(index);
5029             onChanged();
5030           } else {
5031             favoredNodesBuilder_.remove(index);
5032           }
5033           return this;
5034         }
5035         /**
5036          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
5037          */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getFavoredNodesBuilder(
            int index) {
          // Forces creation of the field builder; subsequent mutations are
          // routed through favoredNodesBuilder_ rather than the plain list.
          return getFavoredNodesFieldBuilder().getBuilder(index);
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder(
            int index) {
          // Read-only accessor: does NOT force creation of the field builder.
          if (favoredNodesBuilder_ == null) {
            return favoredNodes_.get(index);
          } else {
            return favoredNodesBuilder_.getMessageOrBuilder(index);
          }
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
         */
        public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
             getFavoredNodesOrBuilderList() {
          if (favoredNodesBuilder_ != null) {
            return favoredNodesBuilder_.getMessageOrBuilderList();
          } else {
            // Wrap so callers cannot mutate the builder's backing list.
            return java.util.Collections.unmodifiableList(favoredNodes_);
          }
        }
5063         /**
5064          * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
5065          */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodesBuilder() {
          // Appends a builder seeded with the ServerName default instance.
          return getFavoredNodesFieldBuilder().addBuilder(
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodesBuilder(
            int index) {
          // Inserts a default-initialized builder at the given position.
          return getFavoredNodesFieldBuilder().addBuilder(
              index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 3;</code>
         */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder>
             getFavoredNodesBuilderList() {
          return getFavoredNodesFieldBuilder().getBuilderList();
        }
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
            getFavoredNodesFieldBuilder() {
          // Lazily creates the RepeatedFieldBuilder on first use, handing the
          // current list (and its mutability bit 0x04) over to it. From then
          // on favoredNodes_ is null and the builder owns all field state.
          if (favoredNodesBuilder_ == null) {
            favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                    favoredNodes_,
                    ((bitField0_ & 0x00000004) == 0x00000004),
                    getParentForChildren(),
                    isClean());
            favoredNodes_ = null;
          }
          return favoredNodesBuilder_;
        }
5099 
        // optional bool openForDistributedLogReplay = 4;
        private boolean openForDistributedLogReplay_ ;
        /**
         * <code>optional bool openForDistributedLogReplay = 4;</code>
         *
         * <pre>
         * open region for distributedLogReplay
         * </pre>
         */
        public boolean hasOpenForDistributedLogReplay() {
          // Bit 0x08 of bitField0_ records explicit presence of field 4.
          return ((bitField0_ & 0x00000008) == 0x00000008);
        }
        /**
         * <code>optional bool openForDistributedLogReplay = 4;</code>
         *
         * <pre>
         * open region for distributedLogReplay
         * </pre>
         */
        public boolean getOpenForDistributedLogReplay() {
          return openForDistributedLogReplay_;
        }
        /**
         * <code>optional bool openForDistributedLogReplay = 4;</code>
         *
         * <pre>
         * open region for distributedLogReplay
         * </pre>
         */
        public Builder setOpenForDistributedLogReplay(boolean value) {
          bitField0_ |= 0x00000008;
          openForDistributedLogReplay_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>optional bool openForDistributedLogReplay = 4;</code>
         *
         * <pre>
         * open region for distributedLogReplay
         * </pre>
         */
        public Builder clearOpenForDistributedLogReplay() {
          // Resets to the proto2 default (false) and clears the presence bit.
          bitField0_ = (bitField0_ & ~0x00000008);
          openForDistributedLogReplay_ = false;
          onChanged();
          return this;
        }
5148 
5149         // @@protoc_insertion_point(builder_scope:hbase.pb.OpenRegionRequest.RegionOpenInfo)
5150       }
5151 
      static {
        // Eagerly build the singleton default instance used by parsing and
        // by getDefaultInstance(); initFields() sets proto2 field defaults.
        defaultInstance = new RegionOpenInfo(true);
        defaultInstance.initFields();
      }
5156 
5157       // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionRequest.RegionOpenInfo)
5158     }
5159 
    private int bitField0_;
    // repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;
    public static final int OPEN_INFO_FIELD_NUMBER = 1;
    // Made unmodifiable in the builder's buildPartial(), so the accessors
    // below can expose it without defensive copies.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_;
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() {
      return openInfo_;
    }
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>
        getOpenInfoOrBuilderList() {
      return openInfo_;
    }
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    public int getOpenInfoCount() {
      return openInfo_.size();
    }
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) {
      return openInfo_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
        int index) {
      return openInfo_.get(index);
    }
5196 
    // optional uint64 serverStartCode = 2;
    public static final int SERVERSTARTCODE_FIELD_NUMBER = 2;
    private long serverStartCode_;
    /**
     * <code>optional uint64 serverStartCode = 2;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    public boolean hasServerStartCode() {
      // Message bit 0x01 records explicit presence of field 2.
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 serverStartCode = 2;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    public long getServerStartCode() {
      return serverStartCode_;
    }

    // optional uint64 master_system_time = 5;
    public static final int MASTER_SYSTEM_TIME_FIELD_NUMBER = 5;
    private long masterSystemTime_;
    /**
     * <code>optional uint64 master_system_time = 5;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     */
    public boolean hasMasterSystemTime() {
      // Message bit 0x02 records explicit presence of field 5.
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 master_system_time = 5;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     */
    public long getMasterSystemTime() {
      return masterSystemTime_;
    }
5244 
    private void initFields() {
      // Proto2 defaults: empty repeated field, zero for both uint64 fields.
      openInfo_ = java.util.Collections.emptyList();
      serverStartCode_ = 0L;
      masterSystemTime_ = 0L;
    }
    // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Only the nested RegionOpenInfo messages can carry required fields;
      // this message's own fields are optional/repeated.
      for (int i = 0; i < getOpenInfoCount(); i++) {
        if (!getOpenInfo(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
5264 
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Populate the memoized size before serializing (generated contract).
      getSerializedSize();
      for (int i = 0; i < openInfo_.size(); i++) {
        output.writeMessage(1, openInfo_.get(i));
      }
      // Optional scalars are written only when their presence bit is set.
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(2, serverStartCode_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(5, masterSystemTime_);
      }
      getUnknownFields().writeTo(output);
    }
5279 
    // -1 means "not yet computed"; messages are immutable so the size is
    // computed at most once.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < openInfo_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, openInfo_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, serverStartCode_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, masterSystemTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
5302 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization delegates to GeneratedMessage's serialized proxy.
      return super.writeReplace();
    }
5309 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      // Generated equality: identity fast-path, type check, then
      // field-by-field comparison gated on presence, and finally the
      // unknown-field sets.
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj;

      boolean result = true;
      result = result && getOpenInfoList()
          .equals(other.getOpenInfoList());
      // Optional fields must agree on presence before values are compared.
      result = result && (hasServerStartCode() == other.hasServerStartCode());
      if (hasServerStartCode()) {
        result = result && (getServerStartCode()
            == other.getServerStartCode());
      }
      result = result && (hasMasterSystemTime() == other.hasMasterSystemTime());
      if (hasMasterSystemTime()) {
        result = result && (getMasterSystemTime()
            == other.getMasterSystemTime());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
5337 
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      // 0 doubles as the "not yet computed" sentinel; if the mix below ever
      // yields exactly 0 the hash is simply recomputed on each call (benign).
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      // Only set/non-empty fields contribute, mirroring equals().
      if (getOpenInfoCount() > 0) {
        hash = (37 * hash) + OPEN_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getOpenInfoList().hashCode();
      }
      if (hasServerStartCode()) {
        hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getServerStartCode());
      }
      if (hasMasterSystemTime()) {
        hash = (37 * hash) + MASTER_SYSTEM_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getMasterSystemTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
5362 
    // Static parse entry points. All variants delegate to the shared PARSER
    // and differ only in the input source and whether an extension registry
    // is supplied; the *Delimited* forms read a length-prefixed message.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
5415 
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) {
      // Fresh builder pre-populated with the prototype's field values.
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Nested-builder variant: parent is notified of changes via onChanged().
      Builder builder = new Builder(parent);
      return builder;
    }
5429     /**
5430      * Protobuf type {@code hbase.pb.OpenRegionRequest}
5431      */
5432     public static final class Builder extends
5433         com.google.protobuf.GeneratedMessage.Builder<Builder>
5434        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        // Descriptor initialized in AdminProtos' static descriptor setup.
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        // Binds the reflective accessor table to the message/builder classes.
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class);
      }
5446 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Nested field builders are created eagerly only when the protobuf
        // runtime sets alwaysUseFieldBuilders (descriptor-based runtime).
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getOpenInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
5465 
      public Builder clear() {
        super.clear();
        // Builder bit layout: 0x01 = open_info list is mutable,
        // 0x02 = serverStartCode set, 0x04 = master_system_time set.
        if (openInfoBuilder_ == null) {
          openInfo_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          openInfoBuilder_.clear();
        }
        serverStartCode_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        masterSystemTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
5480 
      public Builder clone() {
        // Deep copy via an intermediate partial message.
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
      }
5493 
5494       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() {
5495         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial();
5496         if (!result.isInitialized()) {
5497           throw newUninitializedMessageException(result);
5498         }
5499         return result;
5500       }
5501 
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (openInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            // Freeze the list so the message can expose it directly; clearing
            // the mutability bit makes the builder copy-on-write afterwards.
            openInfo_ = java.util.Collections.unmodifiableList(openInfo_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.openInfo_ = openInfo_;
        } else {
          result.openInfo_ = openInfoBuilder_.build();
        }
        // Remap builder presence bits to the message's bit layout:
        // builder 0x02 (serverStartCode) -> message 0x01,
        // builder 0x04 (master_system_time) -> message 0x02.
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.serverStartCode_ = serverStartCode_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000002;
        }
        result.masterSystemTime_ = masterSystemTime_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
5527 
5528       public Builder mergeFrom(com.google.protobuf.Message other) {
5529         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) {
5530           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other);
5531         } else {
5532           super.mergeFrom(other);
5533           return this;
5534         }
5535       }
5536 
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this;
        if (openInfoBuilder_ == null) {
          if (!other.openInfo_.isEmpty()) {
            if (openInfo_.isEmpty()) {
              // Adopt the other message's list without copying; clearing the
              // mutability bit forces a copy on the next local mutation.
              openInfo_ = other.openInfo_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureOpenInfoIsMutable();
              openInfo_.addAll(other.openInfo_);
            }
            onChanged();
          }
        } else {
          if (!other.openInfo_.isEmpty()) {
            if (openInfoBuilder_.isEmpty()) {
              // Builder holds nothing: discard it, share the other list, and
              // re-create the builder only if the runtime requires it.
              openInfoBuilder_.dispose();
              openInfoBuilder_ = null;
              openInfo_ = other.openInfo_;
              bitField0_ = (bitField0_ & ~0x00000001);
              openInfoBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getOpenInfoFieldBuilder() : null;
            } else {
              openInfoBuilder_.addAllMessages(other.openInfo_);
            }
          }
        }
        // Optional scalars: last-set-wins, only when present in other.
        if (other.hasServerStartCode()) {
          setServerStartCode(other.getServerStartCode());
        }
        if (other.hasMasterSystemTime()) {
          setMasterSystemTime(other.getMasterSystemTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
5574 
5575       public final boolean isInitialized() {
5576         for (int i = 0; i < getOpenInfoCount(); i++) {
5577           if (!getOpenInfo(i).isInitialized()) {
5578             
5579             return false;
5580           }
5581         }
5582         return true;
5583       }
5584 
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was successfully parsed before the failure so it is
          // still merged in the finally block, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_ =
        java.util.Collections.emptyList();
      private void ensureOpenInfoIsMutable() {
        // Copy-on-write guard: bit 0x01 tracks whether openInfo_ is a private
        // ArrayList (mutable) or a possibly shared/immutable list.
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          openInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>(openInfo_);
          bitField0_ |= 0x00000001;
         }
      }

      // Null until getOpenInfoFieldBuilder() is first called; once created it
      // owns all repeated-field state and openInfo_ is set to null.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_;
5616 
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       *
       * <p>Read-only view of the current contents; does not force creation of
       * the field builder.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() {
        if (openInfoBuilder_ == null) {
          return java.util.Collections.unmodifiableList(openInfo_);
        } else {
          return openInfoBuilder_.getMessageList();
        }
      }
5627       /**
5628        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5629        */
5630       public int getOpenInfoCount() {
5631         if (openInfoBuilder_ == null) {
5632           return openInfo_.size();
5633         } else {
5634           return openInfoBuilder_.getCount();
5635         }
5636       }
5637       /**
5638        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5639        */
5640       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) {
5641         if (openInfoBuilder_ == null) {
5642           return openInfo_.get(index);
5643         } else {
5644           return openInfoBuilder_.getMessage(index);
5645         }
5646       }
5647       /**
5648        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5649        */
      public Builder setOpenInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) {
        // Replaces the element at index in whichever store is active.
        if (openInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureOpenInfoIsMutable();
          openInfo_.set(index, value);
          onChanged();
        } else {
          openInfoBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       */
      public Builder setOpenInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) {
        // Builder variant: the sub-builder is materialized via build() here.
        if (openInfoBuilder_ == null) {
          ensureOpenInfoIsMutable();
          openInfo_.set(index, builderForValue.build());
          onChanged();
        } else {
          openInfoBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
5678       /**
5679        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5680        */
      public Builder addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) {
        // Append/insert mutators; each follows the same dual-path pattern
        // (plain list before the field builder exists, builder afterwards).
        if (openInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureOpenInfoIsMutable();
          openInfo_.add(value);
          onChanged();
        } else {
          openInfoBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       */
      public Builder addOpenInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) {
        if (openInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureOpenInfoIsMutable();
          openInfo_.add(index, value);
          onChanged();
        } else {
          openInfoBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       */
      public Builder addOpenInfo(
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) {
        if (openInfoBuilder_ == null) {
          ensureOpenInfoIsMutable();
          openInfo_.add(builderForValue.build());
          onChanged();
        } else {
          openInfoBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       */
      public Builder addOpenInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) {
        if (openInfoBuilder_ == null) {
          ensureOpenInfoIsMutable();
          openInfo_.add(index, builderForValue.build());
          onChanged();
        } else {
          openInfoBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       */
      public Builder addAllOpenInfo(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> values) {
        if (openInfoBuilder_ == null) {
          ensureOpenInfoIsMutable();
          // GeneratedMessage.Builder helper that bulk-adds the iterable.
          super.addAll(values, openInfo_);
          onChanged();
        } else {
          openInfoBuilder_.addAllMessages(values);
        }
        return this;
      }
5753       /**
5754        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5755        */
      public Builder clearOpenInfo() {
        if (openInfoBuilder_ == null) {
          // Reset to the shared empty list and drop the mutability bit.
          openInfo_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          openInfoBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
       */
      public Builder removeOpenInfo(int index) {
        if (openInfoBuilder_ == null) {
          // Copy-on-write guard before the in-place removal.
          ensureOpenInfoIsMutable();
          openInfo_.remove(index);
          onChanged();
        } else {
          openInfoBuilder_.remove(index);
        }
        return this;
      }
5779       /**
5780        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5781        */
5782       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder getOpenInfoBuilder(
5783           int index) {
5784         return getOpenInfoFieldBuilder().getBuilder(index);
5785       }
5786       /**
5787        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5788        */
5789       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
5790           int index) {
5791         if (openInfoBuilder_ == null) {
5792           return openInfo_.get(index);  } else {
5793           return openInfoBuilder_.getMessageOrBuilder(index);
5794         }
5795       }
5796       /**
5797        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5798        */
5799       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> 
5800            getOpenInfoOrBuilderList() {
5801         if (openInfoBuilder_ != null) {
5802           return openInfoBuilder_.getMessageOrBuilderList();
5803         } else {
5804           return java.util.Collections.unmodifiableList(openInfo_);
5805         }
5806       }
5807       /**
5808        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5809        */
5810       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder() {
5811         return getOpenInfoFieldBuilder().addBuilder(
5812             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance());
5813       }
5814       /**
5815        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5816        */
5817       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder(
5818           int index) {
5819         return getOpenInfoFieldBuilder().addBuilder(
5820             index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance());
5821       }
5822       /**
5823        * <code>repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1;</code>
5824        */
5825       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder> 
5826            getOpenInfoBuilderList() {
5827         return getOpenInfoFieldBuilder().getBuilderList();
5828       }
5829       private com.google.protobuf.RepeatedFieldBuilder<
5830           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> 
5831           getOpenInfoFieldBuilder() {
              // Lazy init: on first use the local list is handed off to the
              // RepeatedFieldBuilder and nulled out so there is a single owner.
5832         if (openInfoBuilder_ == null) {
5833           openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5834               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>(
5835                   openInfo_,
                      // bit 0 of bitField0_ tracks whether the list was mutable/owned
5836                   ((bitField0_ & 0x00000001) == 0x00000001),
5837                   getParentForChildren(),
5838                   isClean());
              // the builder now owns the elements; drop the local reference
5839           openInfo_ = null;
5840         }
5841         return openInfoBuilder_;
5842       }
5843 
5844       // optional uint64 serverStartCode = 2;
5845       private long serverStartCode_ ;
5846       /**
5847        * <code>optional uint64 serverStartCode = 2;</code>
5848        *
5849        * <pre>
5850        * the intended server for this RPC.
5851        * </pre>
5852        */
5853       public boolean hasServerStartCode() {
5854         return ((bitField0_ & 0x00000002) == 0x00000002);
5855       }
5856       /**
5857        * <code>optional uint64 serverStartCode = 2;</code>
5858        *
5859        * <pre>
5860        * the intended server for this RPC.
5861        * </pre>
5862        */
5863       public long getServerStartCode() {
5864         return serverStartCode_;
5865       }
5866       /**
5867        * <code>optional uint64 serverStartCode = 2;</code>
5868        *
5869        * <pre>
5870        * the intended server for this RPC.
5871        * </pre>
5872        */
5873       public Builder setServerStartCode(long value) {
5874         bitField0_ |= 0x00000002;
5875         serverStartCode_ = value;
5876         onChanged();
5877         return this;
5878       }
5879       /**
5880        * <code>optional uint64 serverStartCode = 2;</code>
5881        *
5882        * <pre>
5883        * the intended server for this RPC.
5884        * </pre>
5885        */
5886       public Builder clearServerStartCode() {
5887         bitField0_ = (bitField0_ & ~0x00000002);
5888         serverStartCode_ = 0L;
5889         onChanged();
5890         return this;
5891       }
5892 
5893       // optional uint64 master_system_time = 5;
5894       private long masterSystemTime_ ;
5895       /**
5896        * <code>optional uint64 master_system_time = 5;</code>
5897        *
5898        * <pre>
5899        * wall clock time from master
5900        * </pre>
5901        */
5902       public boolean hasMasterSystemTime() {
5903         return ((bitField0_ & 0x00000004) == 0x00000004);
5904       }
5905       /**
5906        * <code>optional uint64 master_system_time = 5;</code>
5907        *
5908        * <pre>
5909        * wall clock time from master
5910        * </pre>
5911        */
5912       public long getMasterSystemTime() {
5913         return masterSystemTime_;
5914       }
5915       /**
5916        * <code>optional uint64 master_system_time = 5;</code>
5917        *
5918        * <pre>
5919        * wall clock time from master
5920        * </pre>
5921        */
5922       public Builder setMasterSystemTime(long value) {
5923         bitField0_ |= 0x00000004;
5924         masterSystemTime_ = value;
5925         onChanged();
5926         return this;
5927       }
5928       /**
5929        * <code>optional uint64 master_system_time = 5;</code>
5930        *
5931        * <pre>
5932        * wall clock time from master
5933        * </pre>
5934        */
5935       public Builder clearMasterSystemTime() {
5936         bitField0_ = (bitField0_ & ~0x00000004);
5937         masterSystemTime_ = 0L;
5938         onChanged();
5939         return this;
5940       }
5941 
5942       // @@protoc_insertion_point(builder_scope:hbase.pb.OpenRegionRequest)
5943     }
5944 
5945     static {
5946       defaultInstance = new OpenRegionRequest(true);
5947       defaultInstance.initFields();
5948     }
5949 
5950     // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionRequest)
5951   }
5952 
5953   public interface OpenRegionResponseOrBuilder
5954       extends com.google.protobuf.MessageOrBuilder {
5955 
         // Read-only view shared by OpenRegionResponse and its Builder; exposes
         // the single repeated enum field opening_state.
5956     // repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;
5957     /**
5958      * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
5959      */
5960     java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList();
5961     /**
5962      * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
5963      */
5964     int getOpeningStateCount();
5965     /**
5966      * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
5967      */
5968     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index);
5969   }
5970   /**
5971    * Protobuf type {@code hbase.pb.OpenRegionResponse}
5972    */
5973   public static final class OpenRegionResponse extends
5974       com.google.protobuf.GeneratedMessage
5975       implements OpenRegionResponseOrBuilder {
5976     // Use OpenRegionResponse.newBuilder() to construct.
5977     private OpenRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5978       super(builder);
5979       this.unknownFields = builder.getUnknownFields();
5980     }
5981     private OpenRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5982 
5983     private static final OpenRegionResponse defaultInstance;
5984     public static OpenRegionResponse getDefaultInstance() {
5985       return defaultInstance;
5986     }
5987 
5988     public OpenRegionResponse getDefaultInstanceForType() {
5989       return defaultInstance;
5990     }
5991 
5992     private final com.google.protobuf.UnknownFieldSet unknownFields;
5993     @java.lang.Override
5994     public final com.google.protobuf.UnknownFieldSet
5995         getUnknownFields() {
5996       return this.unknownFields;
5997     }
5998     private OpenRegionResponse(
5999         com.google.protobuf.CodedInputStream input,
6000         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6001         throws com.google.protobuf.InvalidProtocolBufferException {
           // Wire-format parsing constructor, invoked via PARSER.parsePartialFrom.
6002       initFields();
6003       int mutable_bitField0_ = 0;
6004       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6005           com.google.protobuf.UnknownFieldSet.newBuilder();
6006       try {
6007         boolean done = false;
6008         while (!done) {
6009           int tag = input.readTag();
6010           switch (tag) {
                 // tag 0 = end of stream / enclosing message
6011             case 0:
6012               done = true;
6013               break;
               // NOTE: 'default' appearing before the concrete cases is legal Java;
               // label order inside a switch does not affect matching.
6014             default: {
6015               if (!parseUnknownField(input, unknownFields,
6016                                      extensionRegistry, tag)) {
6017                 done = true;
6018               }
6019               break;
6020             }
               // tag 8 = field 1, varint: one unpacked enum value
6021             case 8: {
6022               int rawValue = input.readEnum();
6023               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
6024               if (value == null) {
                     // unrecognized enum number: preserve it in unknownFields
6025                 unknownFields.mergeVarintField(1, rawValue);
6026               } else {
6027                 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6028                   openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>();
6029                   mutable_bitField0_ |= 0x00000001;
6030                 }
6031                 openingState_.add(value);
6032               }
6033               break;
6034             }
               // tag 10 = field 1, length-delimited: packed encoding of the same
               // repeated enum; both encodings are accepted for wire compatibility.
6035             case 10: {
6036               int length = input.readRawVarint32();
6037               int oldLimit = input.pushLimit(length);
6038               while(input.getBytesUntilLimit() > 0) {
6039                 int rawValue = input.readEnum();
6040                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
6041                 if (value == null) {
6042                   unknownFields.mergeVarintField(1, rawValue);
6043                 } else {
6044                   if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6045                     openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>();
6046                     mutable_bitField0_ |= 0x00000001;
6047                   }
6048                   openingState_.add(value);
6049                 }
6050               }
6051               input.popLimit(oldLimit);
6052               break;
6053             }
6054           }
6055         }
6056       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6057         throw e.setUnfinishedMessage(this);
6058       } catch (java.io.IOException e) {
6059         throw new com.google.protobuf.InvalidProtocolBufferException(
6060             e.getMessage()).setUnfinishedMessage(this);
6061       } finally {
             // Seal collected state even on failure so the partial message is usable.
6062         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6063           openingState_ = java.util.Collections.unmodifiableList(openingState_);
6064         }
6065         this.unknownFields = unknownFields.build();
6066         makeExtensionsImmutable();
6067       }
6068     }
6069     public static final com.google.protobuf.Descriptors.Descriptor
6070         getDescriptor() {
6071       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_descriptor;
6072     }
6073 
6074     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6075         internalGetFieldAccessorTable() {
6076       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable
6077           .ensureFieldAccessorsInitialized(
6078               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class);
6079     }
6080 
6081     public static com.google.protobuf.Parser<OpenRegionResponse> PARSER =
6082         new com.google.protobuf.AbstractParser<OpenRegionResponse>() {
6083       public OpenRegionResponse parsePartialFrom(
6084           com.google.protobuf.CodedInputStream input,
6085           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6086           throws com.google.protobuf.InvalidProtocolBufferException {
6087         return new OpenRegionResponse(input, extensionRegistry);
6088       }
6089     };
6090 
6091     @java.lang.Override
6092     public com.google.protobuf.Parser<OpenRegionResponse> getParserForType() {
6093       return PARSER;
6094     }
6095 
6096     /**
6097      * Protobuf enum {@code hbase.pb.OpenRegionResponse.RegionOpeningState}
6098      */
6099     public enum RegionOpeningState
6100         implements com.google.protobuf.ProtocolMessageEnum {
6101       /**
6102        * <code>OPENED = 0;</code>
6103        */
6104       OPENED(0, 0),
6105       /**
6106        * <code>ALREADY_OPENED = 1;</code>
6107        */
6108       ALREADY_OPENED(1, 1),
6109       /**
6110        * <code>FAILED_OPENING = 2;</code>
6111        */
6112       FAILED_OPENING(2, 2),
6113       ;
6114 
6115       /**
6116        * <code>OPENED = 0;</code>
6117        */
6118       public static final int OPENED_VALUE = 0;
6119       /**
6120        * <code>ALREADY_OPENED = 1;</code>
6121        */
6122       public static final int ALREADY_OPENED_VALUE = 1;
6123       /**
6124        * <code>FAILED_OPENING = 2;</code>
6125        */
6126       public static final int FAILED_OPENING_VALUE = 2;
6127 
6128 
6129       public final int getNumber() { return value; }
6130 
6131       public static RegionOpeningState valueOf(int value) {
6132         switch (value) {
6133           case 0: return OPENED;
6134           case 1: return ALREADY_OPENED;
6135           case 2: return FAILED_OPENING;
6136           default: return null;
6137         }
6138       }
6139 
6140       public static com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>
6141           internalGetValueMap() {
6142         return internalValueMap;
6143       }
6144       private static com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>
6145           internalValueMap =
6146             new com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>() {
6147               public RegionOpeningState findValueByNumber(int number) {
6148                 return RegionOpeningState.valueOf(number);
6149               }
6150             };
6151 
6152       public final com.google.protobuf.Descriptors.EnumValueDescriptor
6153           getValueDescriptor() {
6154         return getDescriptor().getValues().get(index);
6155       }
6156       public final com.google.protobuf.Descriptors.EnumDescriptor
6157           getDescriptorForType() {
6158         return getDescriptor();
6159       }
6160       public static final com.google.protobuf.Descriptors.EnumDescriptor
6161           getDescriptor() {
6162         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0);
6163       }
6164 
6165       private static final RegionOpeningState[] VALUES = values();
6166 
6167       public static RegionOpeningState valueOf(
6168           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
6169         if (desc.getType() != getDescriptor()) {
6170           throw new java.lang.IllegalArgumentException(
6171             "EnumValueDescriptor is not for this type.");
6172         }
6173         return VALUES[desc.getIndex()];
6174       }
6175 
6176       private final int index;
6177       private final int value;
6178 
6179       private RegionOpeningState(int index, int value) {
6180         this.index = index;
6181         this.value = value;
6182       }
6183 
6184       // @@protoc_insertion_point(enum_scope:hbase.pb.OpenRegionResponse.RegionOpeningState)
6185     }
6186 
6187     // repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;
6188     public static final int OPENING_STATE_FIELD_NUMBER = 1;
6189     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_;
6190     /**
6191      * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6192      */
6193     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
6194       return openingState_;
6195     }
6196     /**
6197      * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6198      */
6199     public int getOpeningStateCount() {
6200       return openingState_.size();
6201     }
6202     /**
6203      * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6204      */
6205     public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
6206       return openingState_.get(index);
6207     }
6208 
6209     private void initFields() {
6210       openingState_ = java.util.Collections.emptyList();
6211     }
6212     private byte memoizedIsInitialized = -1;
6213     public final boolean isInitialized() {
6214       byte isInitialized = memoizedIsInitialized;
6215       if (isInitialized != -1) return isInitialized == 1;
6216 
6217       memoizedIsInitialized = 1;
6218       return true;
6219     }
6220 
6221     public void writeTo(com.google.protobuf.CodedOutputStream output)
6222                         throws java.io.IOException {
           // Called for its memoization side effect before serializing.
6223       getSerializedSize();
           // opening_state is written unpacked: one (tag, varint) pair per element.
6224       for (int i = 0; i < openingState_.size(); i++) {
6225         output.writeEnum(1, openingState_.get(i).getNumber());
6226       }
6227       getUnknownFields().writeTo(output);
6228     }
6229 
6230     private int memoizedSerializedSize = -1;
6231     public int getSerializedSize() {
           // Memoized; -1 is the "not yet computed" sentinel.
6232       int size = memoizedSerializedSize;
6233       if (size != -1) return size;
6234 
6235       size = 0;
6236       {
6237         int dataSize = 0;
6238         for (int i = 0; i < openingState_.size(); i++) {
6239           dataSize += com.google.protobuf.CodedOutputStream
6240             .computeEnumSizeNoTag(openingState_.get(i).getNumber());
6241         }
6242         size += dataSize;
             // one byte of tag per element (unpacked encoding, field number 1)
6243         size += 1 * openingState_.size();
6244       }
6245       size += getUnknownFields().getSerializedSize();
6246       memoizedSerializedSize = size;
6247       return size;
6248     }
6249 
6250     private static final long serialVersionUID = 0L;
6251     @java.lang.Override
6252     protected java.lang.Object writeReplace()
6253         throws java.io.ObjectStreamException {
6254       return super.writeReplace();
6255     }
6256 
6257     @java.lang.Override
6258     public boolean equals(final java.lang.Object obj) {
6259       if (obj == this) {
6260        return true;
6261       }
6262       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) {
             // defer to GeneratedMessage.equals for non-matching types
6263         return super.equals(obj);
6264       }
6265       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj;
6266 
           // value equality: element-wise list comparison plus unknown fields
6267       boolean result = true;
6268       result = result && getOpeningStateList()
6269           .equals(other.getOpeningStateList());
6270       result = result &&
6271           getUnknownFields().equals(other.getUnknownFields());
6272       return result;
6273     }
6274 
6275     private int memoizedHashCode = 0;
6276     @java.lang.Override
6277     public int hashCode() {
           // Memoized; 0 doubles as the "unset" sentinel (a benign recompute occurs
           // if the true hash happens to be 0). Kept consistent with equals().
6278       if (memoizedHashCode != 0) {
6279         return memoizedHashCode;
6280       }
6281       int hash = 41;
6282       hash = (19 * hash) + getDescriptorForType().hashCode();
6283       if (getOpeningStateCount() > 0) {
6284         hash = (37 * hash) + OPENING_STATE_FIELD_NUMBER;
             // hashEnumList hashes by enum number, matching wire semantics
6285         hash = (53 * hash) + hashEnumList(getOpeningStateList());
6286       }
6287       hash = (29 * hash) + getUnknownFields().hashCode();
6288       memoizedHashCode = hash;
6289       return hash;
6290     }
6291 
6292     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
6293         com.google.protobuf.ByteString data)
6294         throws com.google.protobuf.InvalidProtocolBufferException {
6295       return PARSER.parseFrom(data);
6296     }
6297     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
6298         com.google.protobuf.ByteString data,
6299         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6300         throws com.google.protobuf.InvalidProtocolBufferException {
6301       return PARSER.parseFrom(data, extensionRegistry);
6302     }
6303     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data)
6304         throws com.google.protobuf.InvalidProtocolBufferException {
6305       return PARSER.parseFrom(data);
6306     }
6307     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
6308         byte[] data,
6309         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6310         throws com.google.protobuf.InvalidProtocolBufferException {
6311       return PARSER.parseFrom(data, extensionRegistry);
6312     }
6313     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input)
6314         throws java.io.IOException {
6315       return PARSER.parseFrom(input);
6316     }
6317     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
6318         java.io.InputStream input,
6319         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6320         throws java.io.IOException {
6321       return PARSER.parseFrom(input, extensionRegistry);
6322     }
6323     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input)
6324         throws java.io.IOException {
6325       return PARSER.parseDelimitedFrom(input);
6326     }
6327     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(
6328         java.io.InputStream input,
6329         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6330         throws java.io.IOException {
6331       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6332     }
6333     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
6334         com.google.protobuf.CodedInputStream input)
6335         throws java.io.IOException {
6336       return PARSER.parseFrom(input);
6337     }
6338     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
6339         com.google.protobuf.CodedInputStream input,
6340         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6341         throws java.io.IOException {
6342       return PARSER.parseFrom(input, extensionRegistry);
6343     }
6344 
6345     public static Builder newBuilder() { return Builder.create(); }
6346     public Builder newBuilderForType() { return newBuilder(); }
6347     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) {
6348       return newBuilder().mergeFrom(prototype);
6349     }
6350     public Builder toBuilder() { return newBuilder(this); }
6351 
6352     @java.lang.Override
6353     protected Builder newBuilderForType(
6354         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6355       Builder builder = new Builder(parent);
6356       return builder;
6357     }
6358     /**
6359      * Protobuf type {@code hbase.pb.OpenRegionResponse}
6360      */
6361     public static final class Builder extends
6362         com.google.protobuf.GeneratedMessage.Builder<Builder>
6363        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder {
6364       public static final com.google.protobuf.Descriptors.Descriptor
6365           getDescriptor() {
6366         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_descriptor;
6367       }
6368 
6369       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6370           internalGetFieldAccessorTable() {
6371         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable
6372             .ensureFieldAccessorsInitialized(
6373                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class);
6374       }
6375 
6376       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder()
6377       private Builder() {
6378         maybeForceBuilderInitialization();
6379       }
6380 
6381       private Builder(
6382           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6383         super(parent);
6384         maybeForceBuilderInitialization();
6385       }
6386       private void maybeForceBuilderInitialization() {
6387         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6388         }
6389       }
6390       private static Builder create() {
6391         return new Builder();
6392       }
6393 
6394       public Builder clear() {
6395         super.clear();
6396         openingState_ = java.util.Collections.emptyList();
6397         bitField0_ = (bitField0_ & ~0x00000001);
6398         return this;
6399       }
6400 
6401       public Builder clone() {
6402         return create().mergeFrom(buildPartial());
6403       }
6404 
6405       public com.google.protobuf.Descriptors.Descriptor
6406           getDescriptorForType() {
6407         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_descriptor;
6408       }
6409 
6410       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() {
6411         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
6412       }
6413 
6414       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() {
6415         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial();
6416         if (!result.isInitialized()) {
6417           throw newUninitializedMessageException(result);
6418         }
6419         return result;
6420       }
6421 
6422       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() {
6423         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this);
6424         int from_bitField0_ = bitField0_;
6425         if (((bitField0_ & 0x00000001) == 0x00000001)) {
6426           openingState_ = java.util.Collections.unmodifiableList(openingState_);
6427           bitField0_ = (bitField0_ & ~0x00000001);
6428         }
6429         result.openingState_ = openingState_;
6430         onBuilt();
6431         return result;
6432       }
6433 
6434       public Builder mergeFrom(com.google.protobuf.Message other) {
6435         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) {
6436           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other);
6437         } else {
6438           super.mergeFrom(other);
6439           return this;
6440         }
6441       }
6442 
6443       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) {
6444         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this;
6445         if (!other.openingState_.isEmpty()) {
6446           if (openingState_.isEmpty()) {
                 // adopt the other message's (immutable) list directly; clearing the
                 // mutability bit forces a defensive copy before any later mutation
6447             openingState_ = other.openingState_;
6448             bitField0_ = (bitField0_ & ~0x00000001);
6449           } else {
6450             ensureOpeningStateIsMutable();
6451             openingState_.addAll(other.openingState_);
6452           }
6453           onChanged();
6454         }
6455         this.mergeUnknownFields(other.getUnknownFields());
6456         return this;
6457       }
6458 
6459       public final boolean isInitialized() {
6460         return true;
6461       }
6462 
6463       public Builder mergeFrom(
6464           com.google.protobuf.CodedInputStream input,
6465           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6466           throws java.io.IOException {
6467         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parsedMessage = null;
6468         try {
6469           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6470         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6471           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) e.getUnfinishedMessage();
6472           throw e;
6473         } finally {
6474           if (parsedMessage != null) {
6475             mergeFrom(parsedMessage);
6476           }
6477         }
6478         return this;
6479       }
6480       private int bitField0_;
6481 
6482       // repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;
6483       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_ =
6484         java.util.Collections.emptyList();
           // Copy-on-write guard: bit 0 of bitField0_ set => openingState_ is a
           // private mutable ArrayList; unset => it may be shared or immutable.
6485       private void ensureOpeningStateIsMutable() {
6486         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
6487           openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_);
6488           bitField0_ |= 0x00000001;
6489         }
6490       }
6491       /**
6492        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6493        */
6494       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
6495         return java.util.Collections.unmodifiableList(openingState_);
6496       }
6497       /**
6498        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6499        */
6500       public int getOpeningStateCount() {
6501         return openingState_.size();
6502       }
6503       /**
6504        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6505        */
6506       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
6507         return openingState_.get(index);
6508       }
6509       /**
6510        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6511        */
6512       public Builder setOpeningState(
6513           int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) {
6514         if (value == null) {
6515           throw new NullPointerException();
6516         }
6517         ensureOpeningStateIsMutable();
6518         openingState_.set(index, value);
6519         onChanged();
6520         return this;
6521       }
6522       /**
6523        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6524        */
6525       public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) {
6526         if (value == null) {
6527           throw new NullPointerException();
6528         }
6529         ensureOpeningStateIsMutable();
6530         openingState_.add(value);
6531         onChanged();
6532         return this;
6533       }
6534       /**
6535        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6536        */
6537       public Builder addAllOpeningState(
6538           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> values) {
6539         ensureOpeningStateIsMutable();
6540         super.addAll(values, openingState_);
6541         onChanged();
6542         return this;
6543       }
6544       /**
6545        * <code>repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1;</code>
6546        */
6547       public Builder clearOpeningState() {
6548         openingState_ = java.util.Collections.emptyList();
6549         bitField0_ = (bitField0_ & ~0x00000001);
6550         onChanged();
6551         return this;
6552       }
6553 
6554       // @@protoc_insertion_point(builder_scope:hbase.pb.OpenRegionResponse)
6555     }
6556 
6557     static {
6558       defaultInstance = new OpenRegionResponse(true);
6559       defaultInstance.initFields();
6560     }
6561 
6562     // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionResponse)
6563   }
6564 
  /**
   * Read-only accessor contract for {@code hbase.pb.WarmupRegionRequest},
   * implemented by both the immutable message and its builder.
   */
  public interface WarmupRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionInfo regionInfo = 1;
    /**
     * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
     */
    boolean hasRegionInfo();
    /**
     * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
    /**
     * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
  }
  /**
   * Protobuf type {@code hbase.pb.WarmupRegionRequest}
   *
   * <p>Immutable generated message carrying the required {@code RegionInfo}
   * of a region to warm up. Construct via {@link #newBuilder()}.
   */
  public static final class WarmupRegionRequest extends
      com.google.protobuf.GeneratedMessage
      implements WarmupRegionRequestOrBuilder {
    // Use WarmupRegionRequest.newBuilder() to construct.
    private WarmupRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Builds the shared default instance; it carries no unknown fields.
    private WarmupRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the static initializer below.
    private static final WarmupRegionRequest defaultInstance;
    public static WarmupRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public WarmupRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked only through PARSER.
    private WarmupRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (regionInfo), length-delimited: merge into any
              // previously parsed value rather than overwrite it.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = regionInfo_.toBuilder();
              }
              regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(regionInfo_);
                regionInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<WarmupRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<WarmupRegionRequest>() {
      public WarmupRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WarmupRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WarmupRegionRequest> getParserForType() {
      return PARSER;
    }

    // Bit 0x00000001 records presence of the regionInfo field.
    private int bitField0_;
    // required .hbase.pb.RegionInfo regionInfo = 1;
    public static final int REGIONINFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
    /**
     * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
     */
    public boolean hasRegionInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
      return regionInfo_;
    }
    /**
     * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
      return regionInfo_;
    }

    private void initFields() {
      regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
    }
    // Memoized required-field check: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegionInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegionInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, regionInfo_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, regionInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegionInfo() == other.hasRegionInfo());
      if (hasRegionInfo()) {
        result = result && getRegionInfo()
            .equals(other.getRegionInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegionInfo()) {
        hash = (37 * hash) + REGIONINFO_FIELD_NUMBER;
        hash = (53 * hash) + getRegionInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.WarmupRegionRequest}
     *
     * <p>Mutable builder for {@code WarmupRegionRequest}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (regionInfoBuilder_ == null) {
          regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
        } else {
          regionInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionInfoBuilder_ == null) {
          result.regionInfo_ = regionInfo_;
        } else {
          result.regionInfo_ = regionInfoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegionInfo()) {
          mergeRegionInfo(other.getRegionInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // regionInfo is required and must itself be initialized.
      public final boolean isInitialized() {
        if (!hasRegionInfo()) {
          
          return false;
        }
        if (!getRegionInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep the partially decoded message so it is merged in finally.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .hbase.pb.RegionInfo regionInfo = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
      // Lazily created reflection-style sub-builder; while null, the plain
      // regionInfo_ field holds the value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public boolean hasRegionInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
        if (regionInfoBuilder_ == null) {
          return regionInfo_;
        } else {
          return regionInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          regionInfo_ = value;
          onChanged();
        } else {
          regionInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public Builder setRegionInfo(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
          regionInfo_ = builderForValue.build();
          onChanged();
        } else {
          regionInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
          // Merge into an existing non-default value; otherwise just adopt it.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
            regionInfo_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial();
          } else {
            regionInfo_ = value;
          }
          onChanged();
        } else {
          regionInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public Builder clearRegionInfo() {
        if (regionInfoBuilder_ == null) {
          regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
          onChanged();
        } else {
          regionInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionInfoFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
        if (regionInfoBuilder_ != null) {
          return regionInfoBuilder_.getMessageOrBuilder();
        } else {
          return regionInfo_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionInfo regionInfo = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
          getRegionInfoFieldBuilder() {
        if (regionInfoBuilder_ == null) {
          regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
                  regionInfo_,
                  getParentForChildren(),
                  isClean());
          regionInfo_ = null;
        }
        return regionInfoBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.WarmupRegionRequest)
    }

    static {
      defaultInstance = new WarmupRegionRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.WarmupRegionRequest)
  }
7125 
  /**
   * Read-only accessor contract for {@code hbase.pb.WarmupRegionResponse}.
   * The message declares no fields, so this adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface WarmupRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
7129   /**
7130    * Protobuf type {@code hbase.pb.WarmupRegionResponse}
7131    */
7132   public static final class WarmupRegionResponse extends
7133       com.google.protobuf.GeneratedMessage
7134       implements WarmupRegionResponseOrBuilder {
    // Use WarmupRegionResponse.newBuilder() to construct.
    private WarmupRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Builds the shared default instance; it carries no unknown fields.
    private WarmupRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the class static initializer.
    private static final WarmupRegionResponse defaultInstance;
    public static WarmupRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public WarmupRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
7150 
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked only through PARSER. The
    // message has no declared fields, so everything read goes into the
    // unknown-field set.
    private WarmupRegionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.Builder.class);
    }

    // Parser that delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<WarmupRegionResponse> PARSER =
        new com.google.protobuf.AbstractParser<WarmupRegionResponse>() {
      public WarmupRegionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WarmupRegionResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WarmupRegionResponse> getParserForType() {
      return PARSER;
    }
7217 
    // No declared fields for this message, so there are no defaults to set.
    private void initFields() {
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields exist, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes this message to the wire; only unknown fields can be present.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size: -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to the GeneratedMessage implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
7252 
7253     @java.lang.Override
7254     public boolean equals(final java.lang.Object obj) {
7255       if (obj == this) {
7256        return true;
7257       }
7258       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse)) {
7259         return super.equals(obj);
7260       }
7261       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) obj;
7262 
7263       boolean result = true;
7264       result = result &&
7265           getUnknownFields().equals(other.getUnknownFields());
7266       return result;
7267     }
7268 
    // Memoized hash; 0 means "not yet computed" (recomputed if the true hash is 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Prime-multiplier accumulation over the descriptor and unknown fields;
      // must stay consistent with equals(), which compares the same inputs.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7281 
    // The parseFrom/parseDelimitedFrom overloads below all delegate to PARSER;
    // they differ only in input source (ByteString, byte[], InputStream,
    // CodedInputStream) and optional extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a length prefix first, allowing several messages
    // to be read back-to-back from one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
7334 
    // Builder entry points: fresh builder, builder pre-populated from a
    // prototype, and round-trip back to a builder from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent for invalidation callbacks.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
7348     /**
7349      * Protobuf type {@code hbase.pb.WarmupRegionResponse}
7350      */
7351     public static final class Builder extends
7352         com.google.protobuf.GeneratedMessage.Builder<Builder>
7353        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponseOrBuilder {
7354       public static final com.google.protobuf.Descriptors.Descriptor
7355           getDescriptor() {
7356         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_descriptor;
7357       }
7358 
7359       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7360           internalGetFieldAccessorTable() {
7361         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable
7362             .ensureFieldAccessorsInitialized(
7363                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.Builder.class);
7364       }
7365 
7366       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.newBuilder()
7367       private Builder() {
7368         maybeForceBuilderInitialization();
7369       }
7370 
7371       private Builder(
7372           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7373         super(parent);
7374         maybeForceBuilderInitialization();
7375       }
7376       private void maybeForceBuilderInitialization() {
7377         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7378         }
7379       }
7380       private static Builder create() {
7381         return new Builder();
7382       }
7383 
7384       public Builder clear() {
7385         super.clear();
7386         return this;
7387       }
7388 
7389       public Builder clone() {
7390         return create().mergeFrom(buildPartial());
7391       }
7392 
7393       public com.google.protobuf.Descriptors.Descriptor
7394           getDescriptorForType() {
7395         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_descriptor;
7396       }
7397 
7398       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse getDefaultInstanceForType() {
7399         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance();
7400       }
7401 
7402       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse build() {
7403         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse result = buildPartial();
7404         if (!result.isInitialized()) {
7405           throw newUninitializedMessageException(result);
7406         }
7407         return result;
7408       }
7409 
7410       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse buildPartial() {
7411         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse(this);
7412         onBuilt();
7413         return result;
7414       }
7415 
7416       public Builder mergeFrom(com.google.protobuf.Message other) {
7417         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) {
7418           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse)other);
7419         } else {
7420           super.mergeFrom(other);
7421           return this;
7422         }
7423       }
7424 
7425       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse other) {
7426         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance()) return this;
7427         this.mergeUnknownFields(other.getUnknownFields());
7428         return this;
7429       }
7430 
7431       public final boolean isInitialized() {
7432         return true;
7433       }
7434 
7435       public Builder mergeFrom(
7436           com.google.protobuf.CodedInputStream input,
7437           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7438           throws java.io.IOException {
7439         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parsedMessage = null;
7440         try {
7441           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7442         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7443           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) e.getUnfinishedMessage();
7444           throw e;
7445         } finally {
7446           if (parsedMessage != null) {
7447             mergeFrom(parsedMessage);
7448           }
7449         }
7450         return this;
7451       }
7452 
7453       // @@protoc_insertion_point(builder_scope:hbase.pb.WarmupRegionResponse)
7454     }
7455 
    // Eagerly creates the singleton default instance; the noInit constructor
    // skips stream parsing, and initFields() applies field defaults.
    static {
      defaultInstance = new WarmupRegionResponse(true);
      defaultInstance.initFields();
    }
7460 
7461     // @@protoc_insertion_point(class_scope:hbase.pb.WarmupRegionResponse)
7462   }
7463 
  /**
   * Read-only accessor contract for {@code hbase.pb.CloseRegionRequest},
   * implemented by both the immutable message and its Builder.
   */
  public interface CloseRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional uint32 version_of_closing_node = 2;
    /**
     * <code>optional uint32 version_of_closing_node = 2;</code>
     */
    boolean hasVersionOfClosingNode();
    /**
     * <code>optional uint32 version_of_closing_node = 2;</code>
     */
    int getVersionOfClosingNode();

    // optional bool transition_in_ZK = 3 [default = true];
    /**
     * <code>optional bool transition_in_ZK = 3 [default = true];</code>
     */
    boolean hasTransitionInZK();
    /**
     * <code>optional bool transition_in_ZK = 3 [default = true];</code>
     */
    boolean getTransitionInZK();

    // optional .hbase.pb.ServerName destination_server = 4;
    /**
     * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
     */
    boolean hasDestinationServer();
    /**
     * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer();
    /**
     * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder();

    // optional uint64 serverStartCode = 5;
    /**
     * <code>optional uint64 serverStartCode = 5;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    boolean hasServerStartCode();
    /**
     * <code>optional uint64 serverStartCode = 5;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    long getServerStartCode();
  }
7533   /**
7534    * Protobuf type {@code hbase.pb.CloseRegionRequest}
7535    *
7536    * <pre>
7537    **
7538    * Closes the specified region and will use or not use ZK during the close
7539    * according to the specified flag.
7540    * </pre>
7541    */
7542   public static final class CloseRegionRequest extends
7543       com.google.protobuf.GeneratedMessage
7544       implements CloseRegionRequestOrBuilder {
    // Use CloseRegionRequest.newBuilder() to construct.
    private CloseRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor for the singleton default instance; skips parsing.
    private CloseRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in the static initializer below.
    private static final CloseRegionRequest defaultInstance;
    public static CloseRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public CloseRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Stream-parsing constructor: reads one CloseRegionRequest from the wire,
     * setting presence bits as declared fields arrive and collecting
     * unrecognized tags into unknownFields.
     */
    private CloseRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // Start from field defaults before merging from the stream.
      initFields();
      // Declared by the generator; never read in this constructor (no repeated fields).
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          // Each tag encodes (field_number << 3) | wire_type.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input / end of message.
              done = true;
              break;
            // NOTE: Java selects `default` only when no case label matches, so
            // its position before the field cases does not change dispatch.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // Tag 10: field 1 (region), length-delimited message.
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              // If region was already seen, merge the new value into it,
              // matching protobuf last-message-merges semantics.
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            // Tag 16: field 2 (version_of_closing_node), varint.
            case 16: {
              bitField0_ |= 0x00000002;
              versionOfClosingNode_ = input.readUInt32();
              break;
            }
            // Tag 24: field 3 (transition_in_ZK), varint bool.
            case 24: {
              bitField0_ |= 0x00000004;
              transitionInZK_ = input.readBool();
              break;
            }
            // Tag 34: field 4 (destination_server), length-delimited message.
            case 34: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = destinationServer_.toBuilder();
              }
              destinationServer_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(destinationServer_);
                destinationServer_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            // Tag 40: field 5 (serverStartCode), varint.
            case 40: {
              bitField0_ |= 0x00000010;
              serverStartCode_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-built message so callers can recover it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze whatever was parsed, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the protobuf Descriptor for hbase.pb.CloseRegionRequest.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_descriptor;
    }

    // Accessor table used by the GeneratedMessage reflection machinery.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class);
    }

    // Stateless parser singleton; delegates to the stream-parsing constructor.
    public static com.google.protobuf.Parser<CloseRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<CloseRegionRequest>() {
      public CloseRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CloseRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CloseRegionRequest> getParserForType() {
      return PARSER;
    }
7669 
    // Presence bitmask: bit 0 = region, bit 1 = version_of_closing_node,
    // bit 2 = transition_in_ZK, bit 3 = destination_server, bit 4 = serverStartCode.
    private int bitField0_;
    // required .hbase.pb.RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // optional uint32 version_of_closing_node = 2;
    public static final int VERSION_OF_CLOSING_NODE_FIELD_NUMBER = 2;
    private int versionOfClosingNode_;
    /**
     * <code>optional uint32 version_of_closing_node = 2;</code>
     */
    public boolean hasVersionOfClosingNode() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 version_of_closing_node = 2;</code>
     */
    public int getVersionOfClosingNode() {
      return versionOfClosingNode_;
    }

    // optional bool transition_in_ZK = 3 [default = true];
    public static final int TRANSITION_IN_ZK_FIELD_NUMBER = 3;
    private boolean transitionInZK_;
    /**
     * <code>optional bool transition_in_ZK = 3 [default = true];</code>
     */
    public boolean hasTransitionInZK() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool transition_in_ZK = 3 [default = true];</code>
     */
    public boolean getTransitionInZK() {
      return transitionInZK_;
    }

    // optional .hbase.pb.ServerName destination_server = 4;
    public static final int DESTINATION_SERVER_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_;
    /**
     * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
     */
    public boolean hasDestinationServer() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() {
      return destinationServer_;
    }
    /**
     * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() {
      return destinationServer_;
    }

    // optional uint64 serverStartCode = 5;
    public static final int SERVERSTARTCODE_FIELD_NUMBER = 5;
    private long serverStartCode_;
    /**
     * <code>optional uint64 serverStartCode = 5;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    public boolean hasServerStartCode() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional uint64 serverStartCode = 5;</code>
     *
     * <pre>
     * the intended server for this RPC.
     * </pre>
     */
    public long getServerStartCode() {
      return serverStartCode_;
    }
7770 
    // Resets every field to its proto-declared default; note transition_in_ZK
    // defaults to true per its [default = true] declaration.
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      versionOfClosingNode_ = 0;
      transitionInZK_ = true;
      destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      serverStartCode_ = 0L;
    }
7778     private byte memoizedIsInitialized = -1;
7779     public final boolean isInitialized() {
7780       byte isInitialized = memoizedIsInitialized;
7781       if (isInitialized != -1) return isInitialized == 1;
7782 
7783       if (!hasRegion()) {
7784         memoizedIsInitialized = 0;
7785         return false;
7786       }
7787       if (!getRegion().isInitialized()) {
7788         memoizedIsInitialized = 0;
7789         return false;
7790       }
7791       if (hasDestinationServer()) {
7792         if (!getDestinationServer().isInitialized()) {
7793           memoizedIsInitialized = 0;
7794           return false;
7795         }
7796       }
7797       memoizedIsInitialized = 1;
7798       return true;
7799     }
7800 
    // Serializes the set fields in field-number order, then any unknown fields.
    // getSerializedSize() is called first to populate memoized sizes used by
    // nested-message length prefixes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, versionOfClosingNode_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, transitionInZK_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, destinationServer_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, serverStartCode_);
      }
      getUnknownFields().writeTo(output);
    }
7821 
    // Memoized wire size: -1 until first computed. Safe to cache because the
    // message is immutable after construction.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum the encoded size of each set field plus unknown fields; mirrors
      // the branches in writeTo() exactly.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, versionOfClosingNode_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, transitionInZK_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, destinationServer_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, serverStartCode_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
7852 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to the GeneratedMessage implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
7859 
7860     @java.lang.Override
7861     public boolean equals(final java.lang.Object obj) {
7862       if (obj == this) {
7863        return true;
7864       }
7865       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) {
7866         return super.equals(obj);
7867       }
7868       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj;
7869 
7870       boolean result = true;
7871       result = result && (hasRegion() == other.hasRegion());
7872       if (hasRegion()) {
7873         result = result && getRegion()
7874             .equals(other.getRegion());
7875       }
7876       result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode());
7877       if (hasVersionOfClosingNode()) {
7878         result = result && (getVersionOfClosingNode()
7879             == other.getVersionOfClosingNode());
7880       }
7881       result = result && (hasTransitionInZK() == other.hasTransitionInZK());
7882       if (hasTransitionInZK()) {
7883         result = result && (getTransitionInZK()
7884             == other.getTransitionInZK());
7885       }
7886       result = result && (hasDestinationServer() == other.hasDestinationServer());
7887       if (hasDestinationServer()) {
7888         result = result && getDestinationServer()
7889             .equals(other.getDestinationServer());
7890       }
7891       result = result && (hasServerStartCode() == other.hasServerStartCode());
7892       if (hasServerStartCode()) {
7893         result = result && (getServerStartCode()
7894             == other.getServerStartCode());
7895       }
7896       result = result &&
7897           getUnknownFields().equals(other.getUnknownFields());
7898       return result;
7899     }
7900 
    // Memoized hash; 0 means "not yet computed" (recomputed if the true hash is 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Prime-multiplier accumulation over the descriptor, each present field
      // (folding in its field number), and the unknown fields; must stay
      // consistent with equals(), which compares the same inputs.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasVersionOfClosingNode()) {
        hash = (37 * hash) + VERSION_OF_CLOSING_NODE_FIELD_NUMBER;
        hash = (53 * hash) + getVersionOfClosingNode();
      }
      if (hasTransitionInZK()) {
        hash = (37 * hash) + TRANSITION_IN_ZK_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getTransitionInZK());
      }
      if (hasDestinationServer()) {
        hash = (37 * hash) + DESTINATION_SERVER_FIELD_NUMBER;
        hash = (53 * hash) + getDestinationServer().hashCode();
      }
      if (hasServerStartCode()) {
        hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getServerStartCode());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7933 
    // The parseFrom/parseDelimitedFrom overloads below all delegate to PARSER;
    // they differ only in input source and optional extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a length prefix first, allowing several messages
    // to be read back-to-back from one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
7975     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
7976         com.google.protobuf.CodedInputStream input)
7977         throws java.io.IOException {
7978       return PARSER.parseFrom(input);
7979     }
7980     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
7981         com.google.protobuf.CodedInputStream input,
7982         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7983         throws java.io.IOException {
7984       return PARSER.parseFrom(input, extensionRegistry);
7985     }
7986 
7987     public static Builder newBuilder() { return Builder.create(); }
7988     public Builder newBuilderForType() { return newBuilder(); }
7989     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) {
7990       return newBuilder().mergeFrom(prototype);
7991     }
7992     public Builder toBuilder() { return newBuilder(this); }
7993 
7994     @java.lang.Override
7995     protected Builder newBuilderForType(
7996         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7997       Builder builder = new Builder(parent);
7998       return builder;
7999     }
    /**
     * Protobuf type {@code hbase.pb.CloseRegionRequest}
     *
     * <pre>
     **
     * Closes the specified region and will use or not use ZK during the close
     * according to the specified flag.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // When the runtime is configured to always use nested field builders,
      // eagerly create the builders for the two message-typed fields so
      // change notifications are wired up from the start.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getDestinationServerFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its proto-declared default and clears all
      // presence bits in bitField0_.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        versionOfClosingNode_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        // Note: transition_in_ZK is declared [default = true] in Admin.proto.
        transitionInZK_ = true;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (destinationServerBuilder_ == null) {
          destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          destinationServerBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        serverStartCode_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
      }

      // Like buildPartial(), but throws if required fields (region) are unset.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into a new immutable message, translating each
      // builder presence bit in from_bitField0_ into the message's bitField0_.
      // Message-typed fields come from the nested builder when one exists.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.versionOfClosingNode_ = versionOfClosingNode_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.transitionInZK_ = transitionInZK_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (destinationServerBuilder_ == null) {
          result.destinationServer_ = destinationServer_;
        } else {
          result.destinationServer_ = destinationServerBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.serverStartCode_ = serverStartCode_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic dispatch: use the typed merge when possible, otherwise fall
      // back to the reflective field-by-field merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields that are present on 'other'; message fields are
      // merged recursively, scalar fields are overwritten.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasVersionOfClosingNode()) {
          setVersionOfClosingNode(other.getVersionOfClosingNode());
        }
        if (other.hasTransitionInZK()) {
          setTransitionInZK(other.getTransitionInZK());
        }
        if (other.hasDestinationServer()) {
          mergeDestinationServer(other.getDestinationServer());
        }
        if (other.hasServerStartCode()) {
          setServerStartCode(other.getServerStartCode());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // The only required field is 'region'; nested messages must themselves
      // be initialized (destination_server only if present).
      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        if (hasDestinationServer()) {
          if (!getDestinationServer().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      // Parses from the wire and merges into this builder. On a parse error,
      // whatever was successfully read before the failure is still merged
      // (via the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 region, 0x2 version_of_closing_node,
      // 0x4 transition_in_ZK, 0x8 destination_server, 0x10 serverStartCode.
      private int bitField0_;

      // required .hbase.pb.RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      // Lazily created; once non-null it owns the field value and region_ is nulled.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Merge into the existing value only if one was already set and it
          // is not the shared default instance; otherwise just adopt 'value'.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          // Ownership transfers to the field builder; drop the direct reference.
          region_ = null;
        }
        return regionBuilder_;
      }

      // optional uint32 version_of_closing_node = 2;
      private int versionOfClosingNode_ ;
      /**
       * <code>optional uint32 version_of_closing_node = 2;</code>
       */
      public boolean hasVersionOfClosingNode() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint32 version_of_closing_node = 2;</code>
       */
      public int getVersionOfClosingNode() {
        return versionOfClosingNode_;
      }
      /**
       * <code>optional uint32 version_of_closing_node = 2;</code>
       */
      public Builder setVersionOfClosingNode(int value) {
        bitField0_ |= 0x00000002;
        versionOfClosingNode_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 version_of_closing_node = 2;</code>
       */
      public Builder clearVersionOfClosingNode() {
        bitField0_ = (bitField0_ & ~0x00000002);
        versionOfClosingNode_ = 0;
        onChanged();
        return this;
      }

      // optional bool transition_in_ZK = 3 [default = true];
      private boolean transitionInZK_ = true;
      /**
       * <code>optional bool transition_in_ZK = 3 [default = true];</code>
       */
      public boolean hasTransitionInZK() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool transition_in_ZK = 3 [default = true];</code>
       */
      public boolean getTransitionInZK() {
        return transitionInZK_;
      }
      /**
       * <code>optional bool transition_in_ZK = 3 [default = true];</code>
       */
      public Builder setTransitionInZK(boolean value) {
        bitField0_ |= 0x00000004;
        transitionInZK_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool transition_in_ZK = 3 [default = true];</code>
       */
      public Builder clearTransitionInZK() {
        bitField0_ = (bitField0_ & ~0x00000004);
        // Clearing restores the proto-declared default, which is true here.
        transitionInZK_ = true;
        onChanged();
        return this;
      }

      // optional .hbase.pb.ServerName destination_server = 4;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      // Lazily created; once non-null it owns the field value (see region above).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_;
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public boolean hasDestinationServer() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() {
        if (destinationServerBuilder_ == null) {
          return destinationServer_;
        } else {
          return destinationServerBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public Builder setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (destinationServerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          destinationServer_ = value;
          onChanged();
        } else {
          destinationServerBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public Builder setDestinationServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (destinationServerBuilder_ == null) {
          destinationServer_ = builderForValue.build();
          onChanged();
        } else {
          destinationServerBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public Builder mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (destinationServerBuilder_ == null) {
          // Merge into an already-set, non-default value; otherwise adopt 'value'.
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              destinationServer_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            destinationServer_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destinationServer_).mergeFrom(value).buildPartial();
          } else {
            destinationServer_ = value;
          }
          onChanged();
        } else {
          destinationServerBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public Builder clearDestinationServer() {
        if (destinationServerBuilder_ == null) {
          destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          destinationServerBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestinationServerBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getDestinationServerFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() {
        if (destinationServerBuilder_ != null) {
          return destinationServerBuilder_.getMessageOrBuilder();
        } else {
          return destinationServer_;
        }
      }
      /**
       * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getDestinationServerFieldBuilder() {
        if (destinationServerBuilder_ == null) {
          destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  destinationServer_,
                  getParentForChildren(),
                  isClean());
          // Ownership transfers to the field builder; drop the direct reference.
          destinationServer_ = null;
        }
        return destinationServerBuilder_;
      }

      // optional uint64 serverStartCode = 5;
      private long serverStartCode_ ;
      /**
       * <code>optional uint64 serverStartCode = 5;</code>
       *
       * <pre>
       * the intended server for this RPC.
       * </pre>
       */
      public boolean hasServerStartCode() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional uint64 serverStartCode = 5;</code>
       *
       * <pre>
       * the intended server for this RPC.
       * </pre>
       */
      public long getServerStartCode() {
        return serverStartCode_;
      }
      /**
       * <code>optional uint64 serverStartCode = 5;</code>
       *
       * <pre>
       * the intended server for this RPC.
       * </pre>
       */
      public Builder setServerStartCode(long value) {
        bitField0_ |= 0x00000010;
        serverStartCode_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 serverStartCode = 5;</code>
       *
       * <pre>
       * the intended server for this RPC.
       * </pre>
       */
      public Builder clearServerStartCode() {
        bitField0_ = (bitField0_ & ~0x00000010);
        serverStartCode_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.CloseRegionRequest)
    }
8543 
    // Eagerly create the shared singleton default instance and set its fields
    // to their proto-declared defaults.
    static {
      defaultInstance = new CloseRegionRequest(true);
      defaultInstance.initFields();
    }
8548 
8549     // @@protoc_insertion_point(class_scope:hbase.pb.CloseRegionRequest)
8550   }
8551 
  // Read-only view shared by CloseRegionResponse and its Builder: presence
  // check plus accessor for the single required field.
  public interface CloseRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool closed = 1;
    /**
     * <code>required bool closed = 1;</code>
     */
    boolean hasClosed();
    /**
     * <code>required bool closed = 1;</code>
     */
    boolean getClosed();
  }
8565   /**
8566    * Protobuf type {@code hbase.pb.CloseRegionResponse}
8567    */
8568   public static final class CloseRegionResponse extends
8569       com.google.protobuf.GeneratedMessage
8570       implements CloseRegionResponseOrBuilder {
8571     // Use CloseRegionResponse.newBuilder() to construct.
    // Use CloseRegionResponse.newBuilder() to construct.
    private CloseRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance; fields stay at their
    // initFields() defaults and there are no unknown fields.
    private CloseRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance, assigned in the static initializer.
    private static final CloseRegionResponse defaultInstance;
    public static CloseRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public CloseRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that were on the wire but are not defined in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads tag/value pairs from the wire until EOF
    // (tag 0) or an unrecognized field that ends the message. Invoked by
    // PARSER.parsePartialFrom.
    private CloseRegionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Note: placing 'default' before 'case 8' is legal in a Java
            // switch — label order does not affect dispatch.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (closed), wire type 0 (varint): tag = (1 << 3) | 0.
              bitField0_ |= 0x00000001;
              closed_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were read, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class);
    }

    // Stateless parser that simply invokes the parsing constructor above;
    // all static parseFrom overloads delegate to this instance.
    public static com.google.protobuf.Parser<CloseRegionResponse> PARSER =
        new com.google.protobuf.AbstractParser<CloseRegionResponse>() {
      public CloseRegionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CloseRegionResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CloseRegionResponse> getParserForType() {
      return PARSER;
    }
8659 
    // Bit set tracking which fields have been explicitly set (bit 0 = closed).
    private int bitField0_;
    // required bool closed = 1;
    public static final int CLOSED_FIELD_NUMBER = 1;
    private boolean closed_;
    /**
     * <code>required bool closed = 1;</code>
     */
    public boolean hasClosed() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool closed = 1;</code>
     */
    public boolean getClosed() {
      return closed_;
    }
8676 
    // Resets all fields to their proto default values.
    private void initFields() {
      closed_ = false;
    }
    // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'closed' is a required proto2 field; the message is invalid without it.
      if (!hasClosed()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
8692 
    // Serializes the set fields, then any unknown fields, to the wire.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, closed_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, closed_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
8716 
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to the protobuf-aware superclass proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
8723 
    // Value equality: presence and value of 'closed', plus unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj;

      boolean result = true;
      result = result && (hasClosed() == other.hasClosed());
      if (hasClosed()) {
        result = result && (getClosed()
            == other.getClosed());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed (consistent with equals above).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasClosed()) {
        hash = (37 * hash) + CLOSED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getClosed());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
8761 
    // ------------------------------------------------------------------
    // Static parsing entry points; every overload delegates to PARSER.
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
8814 
    // Builder factory helpers.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
8828     /**
8829      * Protobuf type {@code hbase.pb.CloseRegionResponse}
8830      */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder {
      /** Returns the protobuf descriptor for {@code hbase.pb.CloseRegionResponse}. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so there are no field builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets the builder to default values and clears the presence bit.
      public Builder clear() {
        super.clear();
        closed_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without validation.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.closed_ = closed_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Overwrites local fields with any fields set on 'other'.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this;
        if (other.hasClosed()) {
          setClosed(other.getClosed());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasClosed()) {
          // required field 'closed' is not set
          return false;
        }
        return true;
      }

      // Parses from the stream; on failure, merges what was read before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bool closed = 1;
      private boolean closed_ ;
      /**
       * <code>required bool closed = 1;</code>
       */
      public boolean hasClosed() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool closed = 1;</code>
       */
      public boolean getClosed() {
        return closed_;
      }
      /**
       * <code>required bool closed = 1;</code>
       */
      public Builder setClosed(boolean value) {
        bitField0_ |= 0x00000001;
        closed_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool closed = 1;</code>
       */
      public Builder clearClosed() {
        bitField0_ = (bitField0_ & ~0x00000001);
        closed_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.CloseRegionResponse)
    }
8985 
    // Eagerly creates and initializes the singleton default instance.
    static {
      defaultInstance = new CloseRegionResponse(true);
      defaultInstance.initFields();
    }
8990 
8991     // @@protoc_insertion_point(class_scope:hbase.pb.CloseRegionResponse)
8992   }
8993 
  /**
   * Read-only accessor interface for {@code hbase.pb.FlushRegionRequest},
   * implemented by both the immutable message and its Builder.
   */
  public interface FlushRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional uint64 if_older_than_ts = 2;
    /**
     * <code>optional uint64 if_older_than_ts = 2;</code>
     */
    boolean hasIfOlderThanTs();
    /**
     * <code>optional uint64 if_older_than_ts = 2;</code>
     */
    long getIfOlderThanTs();

    // optional bool write_flush_wal_marker = 3;
    /**
     * <code>optional bool write_flush_wal_marker = 3;</code>
     *
     * <pre>
     * whether to write a marker to WAL even if not flushed
     * </pre>
     */
    boolean hasWriteFlushWalMarker();
    /**
     * <code>optional bool write_flush_wal_marker = 3;</code>
     *
     * <pre>
     * whether to write a marker to WAL even if not flushed
     * </pre>
     */
    boolean getWriteFlushWalMarker();
  }
9039   /**
9040    * Protobuf type {@code hbase.pb.FlushRegionRequest}
9041    *
9042    * <pre>
9043    **
9044    * Flushes the MemStore of the specified region.
9045    * &lt;p&gt;
9046    * This method is synchronous.
9047    * </pre>
9048    */
9049   public static final class FlushRegionRequest extends
9050       com.google.protobuf.GeneratedMessage
9051       implements FlushRegionRequestOrBuilder {
9052     // Use FlushRegionRequest.newBuilder() to construct.
    private FlushRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor used only for the singleton default instance (see static block).
    private FlushRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9058 
    // Singleton default instance, assigned in the class's static initializer.
    private static final FlushRegionRequest defaultInstance;
    public static FlushRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public FlushRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are unknown to this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a FlushRegionRequest directly from the stream; unrecognized
     * tags are preserved in {@code unknownFields}.
     */
    private FlushRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // end of stream
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: { // region (field 1, length-delimited message)
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field repeated on the wire: merge into the existing value.
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: { // if_older_than_ts (field 2, varint)
              bitField0_ |= 0x00000002;
              ifOlderThanTs_ = input.readUInt64();
              break;
            }
            case 24: { // write_flush_wal_marker (field 3, varint)
              bitField0_ |= 0x00000004;
              writeFlushWalMarker_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was read, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for {@code hbase.pb.FlushRegionRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_descriptor;
    }

    // Wires the reflective field accessors used by GeneratedMessage internals.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class);
    }
9143 
    // Parser used by every parseFrom() factory below.
    // NOTE(review): mutable public static field as emitted by this protoc
    // version (newer generators declare it final) — never reassign it.
    public static com.google.protobuf.Parser<FlushRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<FlushRegionRequest>() {
      public FlushRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FlushRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FlushRegionRequest> getParserForType() {
      return PARSER;
    }
9158 
    // Bit set tracking set fields: bit 0 = region, bit 1 = if_older_than_ts,
    // bit 2 = write_flush_wal_marker.
    private int bitField0_;
    // required .hbase.pb.RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }
9181 
    // optional uint64 if_older_than_ts = 2;
    public static final int IF_OLDER_THAN_TS_FIELD_NUMBER = 2;
    private long ifOlderThanTs_;
    /**
     * <code>optional uint64 if_older_than_ts = 2;</code>
     */
    public boolean hasIfOlderThanTs() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 if_older_than_ts = 2;</code>
     */
    public long getIfOlderThanTs() {
      return ifOlderThanTs_;
    }
9197 
    // optional bool write_flush_wal_marker = 3;
    public static final int WRITE_FLUSH_WAL_MARKER_FIELD_NUMBER = 3;
    private boolean writeFlushWalMarker_;
    /**
     * <code>optional bool write_flush_wal_marker = 3;</code>
     *
     * <pre>
     * whether to write a marker to WAL even if not flushed
     * </pre>
     */
    public boolean hasWriteFlushWalMarker() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool write_flush_wal_marker = 3;</code>
     *
     * <pre>
     * whether to write a marker to WAL even if not flushed
     * </pre>
     */
    public boolean getWriteFlushWalMarker() {
      return writeFlushWalMarker_;
    }
9221 
    // Resets all fields to their proto default values.
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      ifOlderThanTs_ = 0L;
      writeFlushWalMarker_ = false;
    }
    // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'region' is required and must itself be fully initialized.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
9243 
    // Serializes the set fields (in field-number order), then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, ifOlderThanTs_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, writeFlushWalMarker_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, ifOlderThanTs_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, writeFlushWalMarker_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
9281 
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to the protobuf-aware superclass proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
9288 
    // Value equality: presence and value of each field, plus unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs());
      if (hasIfOlderThanTs()) {
        result = result && (getIfOlderThanTs()
            == other.getIfOlderThanTs());
      }
      result = result && (hasWriteFlushWalMarker() == other.hasWriteFlushWalMarker());
      if (hasWriteFlushWalMarker()) {
        result = result && (getWriteFlushWalMarker()
            == other.getWriteFlushWalMarker());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed (consistent with equals above).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasIfOlderThanTs()) {
        hash = (37 * hash) + IF_OLDER_THAN_TS_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getIfOlderThanTs());
      }
      if (hasWriteFlushWalMarker()) {
        hash = (37 * hash) + WRITE_FLUSH_WAL_MARKER_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getWriteFlushWalMarker());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
9344 
    // ------------------------------------------------------------------
    // Static parsing entry points; every overload delegates to PARSER.
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
9397 
    // Builder factory helpers.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.FlushRegionRequest}
     *
     * <pre>
     **
     * Flushes the MemStore of the specified region.
     * &lt;p&gt;
     * This method is synchronous.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_descriptor;
      }

      // Maps descriptor fields to the generated accessors for reflection support.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it
      // (alwaysUseFieldBuilders is true only in debug/testing configurations).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all has-bits.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        ifOlderThanTs_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        writeFlushWalMarker_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
      }

      // Builds the message, throwing if the required 'region' field is unset
      // or itself uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the initialization check, copying has-bits 1/2/4 for
      // region, if_older_than_ts and write_flush_wal_marker respectively.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.ifOlderThanTs_ = ifOlderThanTs_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.writeFlushWalMarker_ = writeFlushWalMarker_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Type-dispatching merge: uses the fast field-wise merge for our own type,
      // otherwise falls back to reflective merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies each set field of 'other' over this builder; unset fields are left alone.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasIfOlderThanTs()) {
          setIfOlderThanTs(other.getIfOlderThanTs());
        }
        if (other.hasWriteFlushWalMarker()) {
          setWriteFlushWalMarker(other.getWriteFlushWalMarker());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // True only when the required 'region' field is set and itself initialized.
      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        return true;
      }

      // Parses from the wire and merges into this builder; on parse failure the
      // partially-parsed message (if any) is still merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits: 0x1 = region, 0x2 = if_older_than_ts, 0x4 = write_flush_wal_marker.
      private int bitField0_;

      // required .hbase.pb.RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      // Lazily created; once non-null it owns the field and region_ is nulled out.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Field-merge only when a non-default value is already present;
          // otherwise adopt 'value' wholesale.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          // Ownership transfers to the field builder from here on.
          region_ = null;
        }
        return regionBuilder_;
      }

      // optional uint64 if_older_than_ts = 2;
      private long ifOlderThanTs_ ;
      /**
       * <code>optional uint64 if_older_than_ts = 2;</code>
       */
      public boolean hasIfOlderThanTs() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 if_older_than_ts = 2;</code>
       */
      public long getIfOlderThanTs() {
        return ifOlderThanTs_;
      }
      /**
       * <code>optional uint64 if_older_than_ts = 2;</code>
       */
      public Builder setIfOlderThanTs(long value) {
        bitField0_ |= 0x00000002;
        ifOlderThanTs_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 if_older_than_ts = 2;</code>
       */
      public Builder clearIfOlderThanTs() {
        bitField0_ = (bitField0_ & ~0x00000002);
        ifOlderThanTs_ = 0L;
        onChanged();
        return this;
      }

      // optional bool write_flush_wal_marker = 3;
      private boolean writeFlushWalMarker_ ;
      /**
       * <code>optional bool write_flush_wal_marker = 3;</code>
       *
       * <pre>
       * whether to write a marker to WAL even if not flushed
       * </pre>
       */
      public boolean hasWriteFlushWalMarker() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool write_flush_wal_marker = 3;</code>
       *
       * <pre>
       * whether to write a marker to WAL even if not flushed
       * </pre>
       */
      public boolean getWriteFlushWalMarker() {
        return writeFlushWalMarker_;
      }
      /**
       * <code>optional bool write_flush_wal_marker = 3;</code>
       *
       * <pre>
       * whether to write a marker to WAL even if not flushed
       * </pre>
       */
      public Builder setWriteFlushWalMarker(boolean value) {
        bitField0_ |= 0x00000004;
        writeFlushWalMarker_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool write_flush_wal_marker = 3;</code>
       *
       * <pre>
       * whether to write a marker to WAL even if not flushed
       * </pre>
       */
      public Builder clearWriteFlushWalMarker() {
        bitField0_ = (bitField0_ & ~0x00000004);
        writeFlushWalMarker_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.FlushRegionRequest)
    }
9772 
    // Class initializer: creates the singleton default instance via the no-parse
    // constructor and populates its fields with proto defaults.
    static {
      defaultInstance = new FlushRegionRequest(true);
      defaultInstance.initFields();
    }
9777 
9778     // @@protoc_insertion_point(class_scope:hbase.pb.FlushRegionRequest)
9779   }
9780 
  // Read-only view shared by FlushRegionResponse and its Builder; each proto
  // field exposes a has-bit check plus a getter.
  public interface FlushRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 last_flush_time = 1;
    /**
     * <code>required uint64 last_flush_time = 1;</code>
     */
    boolean hasLastFlushTime();
    /**
     * <code>required uint64 last_flush_time = 1;</code>
     */
    long getLastFlushTime();

    // optional bool flushed = 2;
    /**
     * <code>optional bool flushed = 2;</code>
     */
    boolean hasFlushed();
    /**
     * <code>optional bool flushed = 2;</code>
     */
    boolean getFlushed();

    // optional bool wrote_flush_wal_marker = 3;
    /**
     * <code>optional bool wrote_flush_wal_marker = 3;</code>
     */
    boolean hasWroteFlushWalMarker();
    /**
     * <code>optional bool wrote_flush_wal_marker = 3;</code>
     */
    boolean getWroteFlushWalMarker();
  }
9814   /**
9815    * Protobuf type {@code hbase.pb.FlushRegionResponse}
9816    */
9817   public static final class FlushRegionResponse extends
9818       com.google.protobuf.GeneratedMessage
9819       implements FlushRegionResponseOrBuilder {
    // Use FlushRegionResponse.newBuilder() to construct.
    private FlushRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the singleton default instance.
    private FlushRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9826 
    // Singleton with all fields at proto defaults; assigned in the static initializer.
    private static final FlushRegionResponse defaultInstance;
    public static FlushRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public FlushRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
9835 
    // Fields seen on the wire with tags this generated class does not know;
    // preserved so they round-trip on re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: reads tag/value pairs until tag 0 (end of
    // stream/limit), setting has-bits as known fields arrive and stashing
    // anything unrecognized into unknownFields.
    private FlushRegionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the generator emits 'default' before the numbered cases;
          // switch dispatch is unaffected by case ordering.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {  // field 1, varint: last_flush_time
              bitField0_ |= 0x00000001;
              lastFlushTime_ = input.readUInt64();
              break;
            }
            case 16: {  // field 2, varint: flushed
              bitField0_ |= 0x00000002;
              flushed_ = input.readBool();
              break;
            }
            case 24: {  // field 3, varint: wrote_flush_wal_marker
              bitField0_ |= 0x00000004;
              wroteFlushWalMarker_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflection support.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class);
    }
9903 
    // Stateless parser delegating to the wire-parsing constructor; shared by
    // all the static parseFrom/parseDelimitedFrom entry points.
    public static com.google.protobuf.Parser<FlushRegionResponse> PARSER =
        new com.google.protobuf.AbstractParser<FlushRegionResponse>() {
      public FlushRegionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FlushRegionResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FlushRegionResponse> getParserForType() {
      return PARSER;
    }
9918 
    // Has-bits: 0x1 = last_flush_time, 0x2 = flushed, 0x4 = wrote_flush_wal_marker.
    private int bitField0_;
    // required uint64 last_flush_time = 1;
    public static final int LAST_FLUSH_TIME_FIELD_NUMBER = 1;
    private long lastFlushTime_;
    /**
     * <code>required uint64 last_flush_time = 1;</code>
     */
    public boolean hasLastFlushTime() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint64 last_flush_time = 1;</code>
     */
    public long getLastFlushTime() {
      return lastFlushTime_;
    }

    // optional bool flushed = 2;
    public static final int FLUSHED_FIELD_NUMBER = 2;
    private boolean flushed_;
    /**
     * <code>optional bool flushed = 2;</code>
     */
    public boolean hasFlushed() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool flushed = 2;</code>
     */
    public boolean getFlushed() {
      return flushed_;
    }

    // optional bool wrote_flush_wal_marker = 3;
    public static final int WROTE_FLUSH_WAL_MARKER_FIELD_NUMBER = 3;
    private boolean wroteFlushWalMarker_;
    /**
     * <code>optional bool wrote_flush_wal_marker = 3;</code>
     */
    public boolean hasWroteFlushWalMarker() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool wrote_flush_wal_marker = 3;</code>
     */
    public boolean getWroteFlushWalMarker() {
      return wroteFlushWalMarker_;
    }
9967 
    // Sets every field to its proto default (called before parsing and when
    // building the default instance).
    private void initFields() {
      lastFlushTime_ = 0L;
      flushed_ = false;
      wroteFlushWalMarker_ = false;
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The only required field is last_flush_time.
      if (!hasLastFlushTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
9985 
    // Serializes set fields in field-number order, then any unknown fields.
    // getSerializedSize() is invoked first to populate memoized sizes that
    // CodedOutputStream relies on.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, lastFlushTime_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, flushed_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, wroteFlushWalMarker_);
      }
      getUnknownFields().writeTo(output);
    }
10000 
    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum tag+value byte counts for each set field, plus unknown fields.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, lastFlushTime_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, flushed_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, wroteFlushWalMarker_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
10023 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; GeneratedMessage substitutes a serializable proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
10030 
    // Field-wise equality: each field must agree on presence and, when present,
    // on value; unknown fields must also match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj;

      boolean result = true;
      result = result && (hasLastFlushTime() == other.hasLastFlushTime());
      if (hasLastFlushTime()) {
        result = result && (getLastFlushTime()
            == other.getLastFlushTime());
      }
      result = result && (hasFlushed() == other.hasFlushed());
      if (hasFlushed()) {
        result = result && (getFlushed()
            == other.getFlushed());
      }
      result = result && (hasWroteFlushWalMarker() == other.hasWroteFlushWalMarker());
      if (hasWroteFlushWalMarker()) {
        result = result && (getWroteFlushWalMarker()
            == other.getWroteFlushWalMarker());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
10061 
    // Cached hash; 0 means not yet computed (a genuinely-zero hash recomputes,
    // which is harmless).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mixes descriptor identity, then (field number, value) per set field,
      // consistent with equals().
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLastFlushTime()) {
        hash = (37 * hash) + LAST_FLUSH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLastFlushTime());
      }
      if (hasFlushed()) {
        hash = (37 * hash) + FLUSHED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getFlushed());
      }
      if (hasWroteFlushWalMarker()) {
        hash = (37 * hash) + WROTE_FLUSH_WAL_MARKER_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getWroteFlushWalMarker());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
10086 
    // Static parse entry points; all delegate to the shared PARSER. The
    // parseDelimitedFrom variants expect a varint length prefix before the
    // message bytes; the extensionRegistry overloads resolve extension fields.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10139 
    // Creates a fresh Builder with all fields unset.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a Builder pre-populated with the set fields of {@code prototype}.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Round-trips this immutable message back into a mutable Builder.
    public Builder toBuilder() { return newBuilder(this); }
10146 
    // Framework hook: builds a Builder wired to a parent so nested-builder changes
    // propagate invalidation upward (used by GeneratedMessage internals).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
10153     /**
10154      * Protobuf type {@code hbase.pb.FlushRegionResponse}
10155      */
10156     public static final class Builder extends
10157         com.google.protobuf.GeneratedMessage.Builder<Builder>
10158        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_descriptor;
      }

      // Maps descriptor fields to the generated accessors for reflection support.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class);
      }
10170 
10171       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder()
10172       private Builder() {
10173         maybeForceBuilderInitialization();
10174       }
10175 
10176       private Builder(
10177           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10178         super(parent);
10179         maybeForceBuilderInitialization();
10180       }
10181       private void maybeForceBuilderInitialization() {
10182         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10183         }
10184       }
10185       private static Builder create() {
10186         return new Builder();
10187       }
10188 
10189       public Builder clear() {
10190         super.clear();
10191         lastFlushTime_ = 0L;
10192         bitField0_ = (bitField0_ & ~0x00000001);
10193         flushed_ = false;
10194         bitField0_ = (bitField0_ & ~0x00000002);
10195         wroteFlushWalMarker_ = false;
10196         bitField0_ = (bitField0_ & ~0x00000004);
10197         return this;
10198       }
10199 
10200       public Builder clone() {
10201         return create().mergeFrom(buildPartial());
10202       }
10203 
10204       public com.google.protobuf.Descriptors.Descriptor
10205           getDescriptorForType() {
10206         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_descriptor;
10207       }
10208 
10209       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() {
10210         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
10211       }
10212 
10213       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() {
10214         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial();
10215         if (!result.isInitialized()) {
10216           throw newUninitializedMessageException(result);
10217         }
10218         return result;
10219       }
10220 
10221       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() {
10222         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this);
10223         int from_bitField0_ = bitField0_;
10224         int to_bitField0_ = 0;
10225         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10226           to_bitField0_ |= 0x00000001;
10227         }
10228         result.lastFlushTime_ = lastFlushTime_;
10229         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
10230           to_bitField0_ |= 0x00000002;
10231         }
10232         result.flushed_ = flushed_;
10233         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
10234           to_bitField0_ |= 0x00000004;
10235         }
10236         result.wroteFlushWalMarker_ = wroteFlushWalMarker_;
10237         result.bitField0_ = to_bitField0_;
10238         onBuilt();
10239         return result;
10240       }
10241 
10242       public Builder mergeFrom(com.google.protobuf.Message other) {
10243         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) {
10244           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other);
10245         } else {
10246           super.mergeFrom(other);
10247           return this;
10248         }
10249       }
10250 
10251       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) {
10252         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this;
10253         if (other.hasLastFlushTime()) {
10254           setLastFlushTime(other.getLastFlushTime());
10255         }
10256         if (other.hasFlushed()) {
10257           setFlushed(other.getFlushed());
10258         }
10259         if (other.hasWroteFlushWalMarker()) {
10260           setWroteFlushWalMarker(other.getWroteFlushWalMarker());
10261         }
10262         this.mergeUnknownFields(other.getUnknownFields());
10263         return this;
10264       }
10265 
10266       public final boolean isInitialized() {
10267         if (!hasLastFlushTime()) {
10268           
10269           return false;
10270         }
10271         return true;
10272       }
10273 
10274       public Builder mergeFrom(
10275           com.google.protobuf.CodedInputStream input,
10276           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10277           throws java.io.IOException {
10278         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parsedMessage = null;
10279         try {
10280           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10281         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10282           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) e.getUnfinishedMessage();
10283           throw e;
10284         } finally {
10285           if (parsedMessage != null) {
10286             mergeFrom(parsedMessage);
10287           }
10288         }
10289         return this;
10290       }
10291       private int bitField0_;
10292 
10293       // required uint64 last_flush_time = 1;
10294       private long lastFlushTime_ ;
10295       /**
10296        * <code>required uint64 last_flush_time = 1;</code>
10297        */
10298       public boolean hasLastFlushTime() {
10299         return ((bitField0_ & 0x00000001) == 0x00000001);
10300       }
10301       /**
10302        * <code>required uint64 last_flush_time = 1;</code>
10303        */
10304       public long getLastFlushTime() {
10305         return lastFlushTime_;
10306       }
10307       /**
10308        * <code>required uint64 last_flush_time = 1;</code>
10309        */
10310       public Builder setLastFlushTime(long value) {
10311         bitField0_ |= 0x00000001;
10312         lastFlushTime_ = value;
10313         onChanged();
10314         return this;
10315       }
10316       /**
10317        * <code>required uint64 last_flush_time = 1;</code>
10318        */
10319       public Builder clearLastFlushTime() {
10320         bitField0_ = (bitField0_ & ~0x00000001);
10321         lastFlushTime_ = 0L;
10322         onChanged();
10323         return this;
10324       }
10325 
10326       // optional bool flushed = 2;
10327       private boolean flushed_ ;
10328       /**
10329        * <code>optional bool flushed = 2;</code>
10330        */
10331       public boolean hasFlushed() {
10332         return ((bitField0_ & 0x00000002) == 0x00000002);
10333       }
10334       /**
10335        * <code>optional bool flushed = 2;</code>
10336        */
10337       public boolean getFlushed() {
10338         return flushed_;
10339       }
10340       /**
10341        * <code>optional bool flushed = 2;</code>
10342        */
10343       public Builder setFlushed(boolean value) {
10344         bitField0_ |= 0x00000002;
10345         flushed_ = value;
10346         onChanged();
10347         return this;
10348       }
10349       /**
10350        * <code>optional bool flushed = 2;</code>
10351        */
10352       public Builder clearFlushed() {
10353         bitField0_ = (bitField0_ & ~0x00000002);
10354         flushed_ = false;
10355         onChanged();
10356         return this;
10357       }
10358 
10359       // optional bool wrote_flush_wal_marker = 3;
10360       private boolean wroteFlushWalMarker_ ;
10361       /**
10362        * <code>optional bool wrote_flush_wal_marker = 3;</code>
10363        */
10364       public boolean hasWroteFlushWalMarker() {
10365         return ((bitField0_ & 0x00000004) == 0x00000004);
10366       }
10367       /**
10368        * <code>optional bool wrote_flush_wal_marker = 3;</code>
10369        */
10370       public boolean getWroteFlushWalMarker() {
10371         return wroteFlushWalMarker_;
10372       }
10373       /**
10374        * <code>optional bool wrote_flush_wal_marker = 3;</code>
10375        */
10376       public Builder setWroteFlushWalMarker(boolean value) {
10377         bitField0_ |= 0x00000004;
10378         wroteFlushWalMarker_ = value;
10379         onChanged();
10380         return this;
10381       }
10382       /**
10383        * <code>optional bool wrote_flush_wal_marker = 3;</code>
10384        */
10385       public Builder clearWroteFlushWalMarker() {
10386         bitField0_ = (bitField0_ & ~0x00000004);
10387         wroteFlushWalMarker_ = false;
10388         onChanged();
10389         return this;
10390       }
10391 
10392       // @@protoc_insertion_point(builder_scope:hbase.pb.FlushRegionResponse)
10393     }
10394 
    static {
      // Eagerly create the shared default (empty) instance; initFields()
      // resets every field to its proto default value.
      defaultInstance = new FlushRegionResponse(true);
      defaultInstance.initFields();
    }
10399 
10400     // @@protoc_insertion_point(class_scope:hbase.pb.FlushRegionResponse)
10401   }
10402 
  /**
   * Read-only accessor contract implemented by both {@code SplitRegionRequest}
   * and its Builder.
   */
  public interface SplitRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional bytes split_point = 2;
    /**
     * <code>optional bytes split_point = 2;</code>
     */
    boolean hasSplitPoint();
    /**
     * <code>optional bytes split_point = 2;</code>
     */
    com.google.protobuf.ByteString getSplitPoint();
  }
10430   /**
10431    * Protobuf type {@code hbase.pb.SplitRegionRequest}
10432    *
10433    * <pre>
10434    **
10435    * Splits the specified region.
10436    * &lt;p&gt;
10437    * This method currently flushes the region and then forces a compaction which
10438    * will then trigger a split.  The flush is done synchronously but the
10439    * compaction is asynchronous.
10440    * </pre>
10441    */
10442   public static final class SplitRegionRequest extends
10443       com.google.protobuf.GeneratedMessage
10444       implements SplitRegionRequestOrBuilder {
    // Use SplitRegionRequest.newBuilder() to construct.
    private SplitRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // The noInit flag only distinguishes this "empty singleton" ctor from the builder ctor.
    private SplitRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
10451 
    // Shared default (empty) instance, created in the class static initializer.
    private static final SplitRegionRequest defaultInstance;
    public static SplitRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public SplitRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
10460 
    // Fields that arrived on the wire but are not defined in this proto's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Stream-parsing constructor used by PARSER: reads tag/value pairs until
     * end of input, preserving unrecognized fields in {@code unknownFields}.
     */
    private SplitRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the message.
              done = true;
              break;
            default: {
              // NOTE: the generator emits `default` before the field cases;
              // this is valid Java — case position does not affect dispatch.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (region), wire type 2: merge into any value already read
              // so repeated occurrences on the wire combine per proto semantics.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (split_point), wire type 2: length-delimited bytes.
              bitField0_ |= 0x00000002;
              splitPoint_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was read, even when rethrowing.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_descriptor;
    }

    // Maps the descriptor's fields onto this class's accessors for reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class);
    }
10531 
    // NOTE(review): a non-final public static PARSER is what this generator
    // version emits; later protoc versions make it final — confirm before
    // relying on reassignment anywhere.
    public static com.google.protobuf.Parser<SplitRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<SplitRegionRequest>() {
      public SplitRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the stream-parsing constructor.
        return new SplitRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SplitRegionRequest> getParserForType() {
      return PARSER;
    }
10546 
    // Presence bits: 0x1 = region, 0x2 = split_point.
    private int bitField0_;
    // required .hbase.pb.RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // optional bytes split_point = 2;
    public static final int SPLIT_POINT_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString splitPoint_;
    /**
     * <code>optional bytes split_point = 2;</code>
     */
    public boolean hasSplitPoint() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes split_point = 2;</code>
     */
    public com.google.protobuf.ByteString getSplitPoint() {
      return splitPoint_;
    }
10585 
    // Sets every field to its proto default value.
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      splitPoint_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /** True when required field region is set and is itself initialized. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
10606 
    /** Serializes the set fields in field-number order, then any unknown fields. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the memoized size is computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, splitPoint_);
      }
      getUnknownFields().writeTo(output);
    }
10618 
    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Computes (and memoizes) the serialized size in bytes of the set fields. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, splitPoint_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
10637 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization delegates to GeneratedMessage's serialized form.
      return super.writeReplace();
    }
10644 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj;

      // Equal iff each field's presence flag matches and every present field
      // compares equal, including unknown fields.
      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasSplitPoint() == other.hasSplitPoint());
      if (hasSplitPoint()) {
        result = result && getSplitPoint()
            .equals(other.getSplitPoint());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
10670 
    // Memoized hash; 0 means not yet computed.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      // The 19/37/53/29 multipliers are the protobuf generator's fixed scheme,
      // mixing field numbers with field values.
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasSplitPoint()) {
        hash = (37 * hash) + SPLIT_POINT_FIELD_NUMBER;
        hash = (53 * hash) + getSplitPoint().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
10691 
    // Static parse entry points for every supported input form; all delegate
    // to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10744 
    /** Returns a fresh, empty builder for {@code SplitRegionRequest}. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with {@code prototype}'s field values. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder carrying this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }
10751 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Invoked by the protobuf runtime when this message is built as a
      // sub-message of a parent builder.
      Builder builder = new Builder(parent);
      return builder;
    }
10758     /**
10759      * Protobuf type {@code hbase.pb.SplitRegionRequest}
10760      *
10761      * <pre>
10762      **
10763      * Splits the specified region.
10764      * &lt;p&gt;
10765      * This method currently flushes the region and then forces a compaction which
10766      * will then trigger a split.  The flush is done synchronously but the
10767      * compaction is asynchronous.
10768      * </pre>
10769      */
10770     public static final class Builder extends
10771         com.google.protobuf.GeneratedMessage.Builder<Builder>
10772        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_descriptor;
      }

      // Maps the descriptor's fields onto this builder's accessors for reflection.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class);
      }
10784 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Eagerly create the nested region field builder when the runtime is
        // configured to always use field builders.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
10803 
      /** Resets every field to its proto default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        // region may be held either inline or by the nested field builder.
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        splitPoint_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
      }
10829 
      /** Builds the message; throws if the required {@code region} field is unset. */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without checking required fields: copies each field value and
       * translates the builder's presence bits into the message's bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // region comes from the nested builder when one has been created.
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.splitPoint_ = splitPoint_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
10858 
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Copies each field that is present on {@code other} into this builder. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasSplitPoint()) {
          setSplitPoint(other.getSplitPoint());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRegion()) {
          // required field region is unset
          return false;
        }
        if (!getRegion().isInitialized()) {
          // region's own required fields are incomplete
          return false;
        }
        return true;
      }
10891 
      /**
       * Parses from the stream and merges the result into this builder; on a
       * parse error the partially-read message is still merged (in the
       * finally block) before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's fields: bit 0 = region, bit 1 = split_point.
      private int bitField0_;

      // required .hbase.pb.RegionSpecifier region = 1;
      // Either region_ holds the value directly, or (once getRegionBuilder()
      // has been called) regionBuilder_ owns it and region_ is null.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Standard protobuf sub-message merge: if region is already set to a
       * non-default value the two are field-merged, otherwise value replaces it.
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Marks the field present and switches to builder-backed storage.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Lazily creates the SingleFieldBuilder, handing ownership of the
       * current region_ value to it (region_ is nulled afterwards).
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }
11027 
      // optional bytes split_point = 2;
      private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes split_point = 2;</code>
       */
      public boolean hasSplitPoint() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes split_point = 2;</code>
       */
      public com.google.protobuf.ByteString getSplitPoint() {
        return splitPoint_;
      }
      /**
       * <code>optional bytes split_point = 2;</code>
       *
       * Rejects null (ByteString is never null in protobuf APIs), sets the
       * presence bit, stores the value.
       */
      public Builder setSplitPoint(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        splitPoint_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes split_point = 2;</code>
       *
       * Clears the presence bit and restores the field's default value.
       */
      public Builder clearSplitPoint() {
        bitField0_ = (bitField0_ & ~0x00000002);
        splitPoint_ = getDefaultInstance().getSplitPoint();
        onChanged();
        return this;
      }
11063 
11064       // @@protoc_insertion_point(builder_scope:hbase.pb.SplitRegionRequest)
11065     }
11066 
    // Eagerly create the shared default (empty) instance at class-load time.
    static {
      defaultInstance = new SplitRegionRequest(true);
      defaultInstance.initFields();
    }
11071 
11072     // @@protoc_insertion_point(class_scope:hbase.pb.SplitRegionRequest)
11073   }
11074 
  /**
   * Read-only accessor interface for {@code hbase.pb.SplitRegionResponse}.
   * The message declares no fields, so only the base
   * {@link com.google.protobuf.MessageOrBuilder} contract applies.
   */
  public interface SplitRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
11078   /**
11079    * Protobuf type {@code hbase.pb.SplitRegionResponse}
11080    */
11081   public static final class SplitRegionResponse extends
11082       com.google.protobuf.GeneratedMessage
11083       implements SplitRegionResponseOrBuilder {
11084     // Use SplitRegionResponse.newBuilder() to construct.
11085     private SplitRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
11086       super(builder);
11087       this.unknownFields = builder.getUnknownFields();
11088     }
11089     private SplitRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11090 
11091     private static final SplitRegionResponse defaultInstance;
11092     public static SplitRegionResponse getDefaultInstance() {
11093       return defaultInstance;
11094     }
11095 
11096     public SplitRegionResponse getDefaultInstanceForType() {
11097       return defaultInstance;
11098     }
11099 
11100     private final com.google.protobuf.UnknownFieldSet unknownFields;
11101     @java.lang.Override
11102     public final com.google.protobuf.UnknownFieldSet
11103         getUnknownFields() {
11104       return this.unknownFields;
11105     }
11106     private SplitRegionResponse(
11107         com.google.protobuf.CodedInputStream input,
11108         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11109         throws com.google.protobuf.InvalidProtocolBufferException {
11110       initFields();
11111       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
11112           com.google.protobuf.UnknownFieldSet.newBuilder();
11113       try {
11114         boolean done = false;
11115         while (!done) {
11116           int tag = input.readTag();
11117           switch (tag) {
11118             case 0:
11119               done = true;
11120               break;
11121             default: {
11122               if (!parseUnknownField(input, unknownFields,
11123                                      extensionRegistry, tag)) {
11124                 done = true;
11125               }
11126               break;
11127             }
11128           }
11129         }
11130       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11131         throw e.setUnfinishedMessage(this);
11132       } catch (java.io.IOException e) {
11133         throw new com.google.protobuf.InvalidProtocolBufferException(
11134             e.getMessage()).setUnfinishedMessage(this);
11135       } finally {
11136         this.unknownFields = unknownFields.build();
11137         makeExtensionsImmutable();
11138       }
11139     }
11140     public static final com.google.protobuf.Descriptors.Descriptor
11141         getDescriptor() {
11142       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_descriptor;
11143     }
11144 
11145     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11146         internalGetFieldAccessorTable() {
11147       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable
11148           .ensureFieldAccessorsInitialized(
11149               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class);
11150     }
11151 
11152     public static com.google.protobuf.Parser<SplitRegionResponse> PARSER =
11153         new com.google.protobuf.AbstractParser<SplitRegionResponse>() {
11154       public SplitRegionResponse parsePartialFrom(
11155           com.google.protobuf.CodedInputStream input,
11156           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11157           throws com.google.protobuf.InvalidProtocolBufferException {
11158         return new SplitRegionResponse(input, extensionRegistry);
11159       }
11160     };
11161 
11162     @java.lang.Override
11163     public com.google.protobuf.Parser<SplitRegionResponse> getParserForType() {
11164       return PARSER;
11165     }
11166 
11167     private void initFields() {
11168     }
11169     private byte memoizedIsInitialized = -1;
11170     public final boolean isInitialized() {
11171       byte isInitialized = memoizedIsInitialized;
11172       if (isInitialized != -1) return isInitialized == 1;
11173 
11174       memoizedIsInitialized = 1;
11175       return true;
11176     }
11177 
11178     public void writeTo(com.google.protobuf.CodedOutputStream output)
11179                         throws java.io.IOException {
11180       getSerializedSize();
11181       getUnknownFields().writeTo(output);
11182     }
11183 
11184     private int memoizedSerializedSize = -1;
11185     public int getSerializedSize() {
11186       int size = memoizedSerializedSize;
11187       if (size != -1) return size;
11188 
11189       size = 0;
11190       size += getUnknownFields().getSerializedSize();
11191       memoizedSerializedSize = size;
11192       return size;
11193     }
11194 
11195     private static final long serialVersionUID = 0L;
11196     @java.lang.Override
11197     protected java.lang.Object writeReplace()
11198         throws java.io.ObjectStreamException {
11199       return super.writeReplace();
11200     }
11201 
11202     @java.lang.Override
11203     public boolean equals(final java.lang.Object obj) {
11204       if (obj == this) {
11205        return true;
11206       }
11207       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)) {
11208         return super.equals(obj);
11209       }
11210       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj;
11211 
11212       boolean result = true;
11213       result = result &&
11214           getUnknownFields().equals(other.getUnknownFields());
11215       return result;
11216     }
11217 
11218     private int memoizedHashCode = 0;
11219     @java.lang.Override
11220     public int hashCode() {
11221       if (memoizedHashCode != 0) {
11222         return memoizedHashCode;
11223       }
11224       int hash = 41;
11225       hash = (19 * hash) + getDescriptorForType().hashCode();
11226       hash = (29 * hash) + getUnknownFields().hashCode();
11227       memoizedHashCode = hash;
11228       return hash;
11229     }
11230 
11231     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
11232         com.google.protobuf.ByteString data)
11233         throws com.google.protobuf.InvalidProtocolBufferException {
11234       return PARSER.parseFrom(data);
11235     }
11236     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
11237         com.google.protobuf.ByteString data,
11238         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11239         throws com.google.protobuf.InvalidProtocolBufferException {
11240       return PARSER.parseFrom(data, extensionRegistry);
11241     }
11242     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data)
11243         throws com.google.protobuf.InvalidProtocolBufferException {
11244       return PARSER.parseFrom(data);
11245     }
11246     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
11247         byte[] data,
11248         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11249         throws com.google.protobuf.InvalidProtocolBufferException {
11250       return PARSER.parseFrom(data, extensionRegistry);
11251     }
11252     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input)
11253         throws java.io.IOException {
11254       return PARSER.parseFrom(input);
11255     }
11256     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
11257         java.io.InputStream input,
11258         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11259         throws java.io.IOException {
11260       return PARSER.parseFrom(input, extensionRegistry);
11261     }
11262     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input)
11263         throws java.io.IOException {
11264       return PARSER.parseDelimitedFrom(input);
11265     }
11266     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(
11267         java.io.InputStream input,
11268         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11269         throws java.io.IOException {
11270       return PARSER.parseDelimitedFrom(input, extensionRegistry);
11271     }
11272     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
11273         com.google.protobuf.CodedInputStream input)
11274         throws java.io.IOException {
11275       return PARSER.parseFrom(input);
11276     }
11277     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
11278         com.google.protobuf.CodedInputStream input,
11279         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11280         throws java.io.IOException {
11281       return PARSER.parseFrom(input, extensionRegistry);
11282     }
11283 
11284     public static Builder newBuilder() { return Builder.create(); }
11285     public Builder newBuilderForType() { return newBuilder(); }
11286     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) {
11287       return newBuilder().mergeFrom(prototype);
11288     }
11289     public Builder toBuilder() { return newBuilder(this); }
11290 
11291     @java.lang.Override
11292     protected Builder newBuilderForType(
11293         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11294       Builder builder = new Builder(parent);
11295       return builder;
11296     }
11297     /**
11298      * Protobuf type {@code hbase.pb.SplitRegionResponse}
11299      */
11300     public static final class Builder extends
11301         com.google.protobuf.GeneratedMessage.Builder<Builder>
11302        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder {
11303       public static final com.google.protobuf.Descriptors.Descriptor
11304           getDescriptor() {
11305         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_descriptor;
11306       }
11307 
11308       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11309           internalGetFieldAccessorTable() {
11310         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable
11311             .ensureFieldAccessorsInitialized(
11312                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class);
11313       }
11314 
11315       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder()
11316       private Builder() {
11317         maybeForceBuilderInitialization();
11318       }
11319 
11320       private Builder(
11321           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11322         super(parent);
11323         maybeForceBuilderInitialization();
11324       }
11325       private void maybeForceBuilderInitialization() {
11326         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11327         }
11328       }
11329       private static Builder create() {
11330         return new Builder();
11331       }
11332 
11333       public Builder clear() {
11334         super.clear();
11335         return this;
11336       }
11337 
11338       public Builder clone() {
11339         return create().mergeFrom(buildPartial());
11340       }
11341 
11342       public com.google.protobuf.Descriptors.Descriptor
11343           getDescriptorForType() {
11344         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_descriptor;
11345       }
11346 
11347       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() {
11348         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
11349       }
11350 
11351       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() {
11352         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial();
11353         if (!result.isInitialized()) {
11354           throw newUninitializedMessageException(result);
11355         }
11356         return result;
11357       }
11358 
11359       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() {
11360         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this);
11361         onBuilt();
11362         return result;
11363       }
11364 
11365       public Builder mergeFrom(com.google.protobuf.Message other) {
11366         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) {
11367           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other);
11368         } else {
11369           super.mergeFrom(other);
11370           return this;
11371         }
11372       }
11373 
11374       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) {
11375         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this;
11376         this.mergeUnknownFields(other.getUnknownFields());
11377         return this;
11378       }
11379 
11380       public final boolean isInitialized() {
11381         return true;
11382       }
11383 
11384       public Builder mergeFrom(
11385           com.google.protobuf.CodedInputStream input,
11386           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11387           throws java.io.IOException {
11388         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parsedMessage = null;
11389         try {
11390           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11391         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11392           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) e.getUnfinishedMessage();
11393           throw e;
11394         } finally {
11395           if (parsedMessage != null) {
11396             mergeFrom(parsedMessage);
11397           }
11398         }
11399         return this;
11400       }
11401 
11402       // @@protoc_insertion_point(builder_scope:hbase.pb.SplitRegionResponse)
11403     }
11404 
11405     static {
11406       defaultInstance = new SplitRegionResponse(true);
11407       defaultInstance.initFields();
11408     }
11409 
11410     // @@protoc_insertion_point(class_scope:hbase.pb.SplitRegionResponse)
11411   }
11412 
  /**
   * Read-only accessor interface for {@code hbase.pb.CompactRegionRequest}:
   * a required region specifier plus optional {@code major} flag and
   * column-family selector.
   */
  public interface CompactRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional bool major = 2;
    /**
     * <code>optional bool major = 2;</code>
     */
    boolean hasMajor();
    /**
     * <code>optional bool major = 2;</code>
     */
    boolean getMajor();

    // optional bytes family = 3;
    /**
     * <code>optional bytes family = 3;</code>
     */
    boolean hasFamily();
    /**
     * <code>optional bytes family = 3;</code>
     */
    com.google.protobuf.ByteString getFamily();
  }
11450   /**
11451    * Protobuf type {@code hbase.pb.CompactRegionRequest}
11452    *
11453    * <pre>
11454    **
11455    * Compacts the specified region.  Performs a major compaction if specified.
11456    * &lt;p&gt;
11457    * This method is asynchronous.
11458    * </pre>
11459    */
11460   public static final class CompactRegionRequest extends
11461       com.google.protobuf.GeneratedMessage
11462       implements CompactRegionRequestOrBuilder {
    // Use CompactRegionRequest.newBuilder() to construct.
    private CompactRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance; takes no field values.
    private CompactRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11469 
    // Singleton empty instance, created in the class's static initializer.
    private static final CompactRegionRequest defaultInstance;
    public static CompactRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public CompactRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
11478 
    // Fields read from the wire that this message version does not declare.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor. Reads tags until end-of-stream
     * (tag 0): field 1 (region, wire type 2 -> tag 10) is read as a
     * sub-message and merged over any previously-seen value, field 2
     * (major, tag 16) as a bool, field 3 (family, tag 26) as bytes;
     * anything else goes to the unknown-field set.
     */
    private CompactRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: protoc emits the default arm before the specific cases;
          // this is legal Java and behaviorally identical since every arm breaks.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              major_ = input.readBool();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              family_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields, even when the parse failed.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for hbase.pb.CompactRegionRequest (defined in AdminProtos).
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_descriptor;
    }
11547 
    // Reflection support: maps descriptor fields to this class's accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class);
    }
11554 
    // Stateless parser delegating to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<CompactRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<CompactRegionRequest>() {
      public CompactRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CompactRegionRequest(input, extensionRegistry);
      }
    };
11564 
    @java.lang.Override
    public com.google.protobuf.Parser<CompactRegionRequest> getParserForType() {
      return PARSER;
    }
11569 
    // Presence bits: bit 0 = region, bit 1 = major, bit 2 = family.
    private int bitField0_;
    // required .hbase.pb.RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }
11592 
    // optional bool major = 2;
    public static final int MAJOR_FIELD_NUMBER = 2;
    private boolean major_;
    /**
     * <code>optional bool major = 2;</code>
     */
    public boolean hasMajor() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool major = 2;</code>
     */
    public boolean getMajor() {
      return major_;
    }
11608 
    // optional bytes family = 3;
    public static final int FAMILY_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString family_;
    /**
     * <code>optional bytes family = 3;</code>
     */
    public boolean hasFamily() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes family = 3;</code>
     */
    public com.google.protobuf.ByteString getFamily() {
      return family_;
    }
11624 
    // Resets every field to its proto default; called from constructors and the
    // static initializer that builds the shared default instance.
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      major_ = false;
      family_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // A CompactRegionRequest is initialized only when the required 'region' field
    // is present and is itself fully initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
11646 
    // Serializes the set fields (checked via the presence bits) in field-number
    // order, then any unknown fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures the size memo is populated before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, major_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, family_);
      }
      getUnknownFields().writeTo(output);
    }
11661 
    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of each present field plus unknown fields, mirroring
    // the field order used by writeTo().
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, major_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, family_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
11684 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
11691 
    // Field-by-field equality: presence flags must match, present values must be
    // equal, and the preserved unknown-field sets must be equal.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasMajor() == other.hasMajor());
      if (hasMajor()) {
        result = result && (getMajor()
            == other.getMajor());
      }
      result = result && (hasFamily() == other.hasFamily());
      if (hasFamily()) {
        result = result && getFamily()
            .equals(other.getFamily());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
11722 
    // Memoized hash; 0 is the "not yet computed" sentinel, so a message whose
    // true hash is 0 recomputes on every call (benign).
    private int memoizedHashCode = 0;
    // Mixes the descriptor, each present field (tag number then value hash),
    // and the unknown-field set — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasMajor()) {
        hash = (37 * hash) + MAJOR_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMajor());
      }
      if (hasFamily()) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamily().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
11747 
    // --- Static parsing entry points -------------------------------------
    // All overloads delegate to PARSER; the ExtensionRegistryLite variants allow
    // resolving extensions while parsing.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
11800 
    // Builder factories: fresh builder, builder seeded from a prototype message,
    // and the framework hook for parent-linked builders.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.CompactRegionRequest}
     *
     * <pre>
     **
     * Compacts the specified region.  Performs a major compaction if specified.
     * &lt;p&gt;
     * This method is asynchronous.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-field builders when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf-runtime test/debug flag).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all three fields to proto defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        major_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        family_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
      }

      // Like buildPartial(), but throws if the required 'region' field is unset
      // or incomplete.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message, translating the builder's
      // presence bits into the message's bitField0_ without validation.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.major_ = major_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.family_ = family_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields 'other' has set; 'region' is sub-merged, scalar
      // fields are overwritten, and unknown fields are merged through.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasMajor()) {
          setMajor(other.getMajor());
        }
        if (other.hasFamily()) {
          setFamily(other.getFamily());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unmemoized check mirroring the message's isInitialized(): requires the
      // 'region' field to be set and itself initialized.
      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        return true;
      }

      // Parses from a stream and merges the result; on InvalidProtocolBuffer-
      // Exception the partially-parsed message is still merged (finally block)
      // before the exception is rethrown.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bits, same layout as the message's bitField0_.
      private int bitField0_;

      // required .hbase.pb.RegionSpecifier region = 1;
      // Invariant: exactly one of region_ / regionBuilder_ carries the value;
      // once regionBuilder_ exists, region_ is nulled and all access goes
      // through the SingleFieldBuilder.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Merges into the existing value when one is already set (and is not the
       * shared default instance); otherwise replaces it outright.
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Returns a mutable nested builder; marks the field present immediately.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       *
       * Lazily creates the SingleFieldBuilder, handing it the current region_
       * and then nulling region_ (see invariant on the field declaration).
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // optional bool major = 2;
      private boolean major_ ;
      /**
       * <code>optional bool major = 2;</code>
       */
      public boolean hasMajor() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool major = 2;</code>
       */
      public boolean getMajor() {
        return major_;
      }
      /**
       * <code>optional bool major = 2;</code>
       */
      public Builder setMajor(boolean value) {
        bitField0_ |= 0x00000002;
        major_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool major = 2;</code>
       */
      public Builder clearMajor() {
        bitField0_ = (bitField0_ & ~0x00000002);
        major_ = false;
        onChanged();
        return this;
      }

      // optional bytes family = 3;
      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes family = 3;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes family = 3;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }
      /**
       * <code>optional bytes family = 3;</code>
       */
      public Builder setFamily(com.google.protobuf.ByteString value) {
        // (Misindented null check below is verbatim protoc output.)
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        family_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes family = 3;</code>
       */
      public Builder clearFamily() {
        bitField0_ = (bitField0_ & ~0x00000004);
        family_ = getDefaultInstance().getFamily();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.CompactRegionRequest)
    }
12162 
    // Eagerly builds the shared immutable default instance via the no-init
    // constructor, then applies proto default field values.
    static {
      defaultInstance = new CompactRegionRequest(true);
      defaultInstance.initFields();
    }
12167 
12168     // @@protoc_insertion_point(class_scope:hbase.pb.CompactRegionRequest)
12169   }
12170 
  /**
   * Read-only accessor interface for {@code hbase.pb.CompactRegionResponse};
   * the message has no fields, so only the inherited MessageOrBuilder API applies.
   */
  public interface CompactRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
12174   /**
12175    * Protobuf type {@code hbase.pb.CompactRegionResponse}
12176    */
12177   public static final class CompactRegionResponse extends
12178       com.google.protobuf.GeneratedMessage
12179       implements CompactRegionResponseOrBuilder {
12180     // Use CompactRegionResponse.newBuilder() to construct.
12181     private CompactRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12182       super(builder);
12183       this.unknownFields = builder.getUnknownFields();
12184     }
12185     private CompactRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12186 
    // Shared immutable singleton; assigned in the class's static initializer.
    private static final CompactRegionResponse defaultInstance;
    public static CompactRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public CompactRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields from the wire that this (field-less) message type did not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: the message declares no fields, so every
    // non-zero tag is routed into the unknown-field set. Tag 0 means end of input.
    private CompactRegionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were read, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table generated for
    // hbase.pb.CompactRegionResponse.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class);
    }
12247 
    // Stream parser delegating to the parsing constructor above.
    // NOTE(review): protoc of this vintage emits PARSER as public non-final;
    // do not hand-edit — later protoc versions change this themselves.
    public static com.google.protobuf.Parser<CompactRegionResponse> PARSER =
        new com.google.protobuf.AbstractParser<CompactRegionResponse>() {
      public CompactRegionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CompactRegionResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CompactRegionResponse> getParserForType() {
      return PARSER;
    }
12262 
    // No declared fields, so there is nothing to initialize.
    private void initFields() {
    }
    // Tri-state memo: -1 = not yet computed, 0 = false, 1 = true. With no
    // required fields the message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Only unknown fields (if any) are written to the wire.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
12290 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
12297 
    // With no declared fields, equality reduces to type + unknown-field equality.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash over descriptor and unknown fields; 0 = "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
12326 
    // --- Static parsing entry points -------------------------------------
    // All overloads delegate to PARSER; the ExtensionRegistryLite variants allow
    // resolving extensions while parsing.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
12379 
12380     public static Builder newBuilder() { return Builder.create(); }
12381     public Builder newBuilderForType() { return newBuilder(); }
12382     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) {
12383       return newBuilder().mergeFrom(prototype);
12384     }
12385     public Builder toBuilder() { return newBuilder(this); }
12386 
12387     @java.lang.Override
12388     protected Builder newBuilderForType(
12389         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12390       Builder builder = new Builder(parent);
12391       return builder;
12392     }
    /**
     * Protobuf type {@code hbase.pb.CompactRegionResponse}
     *
     * <p>Builder for the field-less {@code CompactRegionResponse} message.
     * Because the message declares no fields, this builder only manages
     * unknown fields carried through from a parsed/merged source.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields exist on this message, so there are no
      // field builders to eagerly initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
      }

      // build() enforces required-field initialization; with no required
      // fields this cannot throw in practice.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merging another CompactRegionResponse only pulls in its unknown
      // fields, since the message has no declared fields of its own.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Stream merge: if parsing fails mid-message, whatever was parsed so
      // far is still merged into this builder before the exception
      // propagates (see the finally block).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.CompactRegionResponse)
    }
12500 
    static {
      // Eagerly create the type's singleton default instance (noInit=true
      // skips stream parsing) and initialize its empty field set.
      defaultInstance = new CompactRegionResponse(true);
      defaultInstance.initFields();
    }
12505 
12506     // @@protoc_insertion_point(class_scope:hbase.pb.CompactRegionResponse)
12507   }
12508 
  /**
   * Accessor contract shared by {@code UpdateFavoredNodesRequest} and its
   * Builder for the repeated {@code update_info} field.
   */
  public interface UpdateFavoredNodesRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo> 
        getUpdateInfoList();
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo getUpdateInfo(int index);
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    int getUpdateInfoCount();
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> 
        getUpdateInfoOrBuilderList();
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder getUpdateInfoOrBuilder(
        int index);
  }
12537   /**
12538    * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest}
12539    */
12540   public static final class UpdateFavoredNodesRequest extends
12541       com.google.protobuf.GeneratedMessage
12542       implements UpdateFavoredNodesRequestOrBuilder {
12543     // Use UpdateFavoredNodesRequest.newBuilder() to construct.
    // Builder-based constructor; carries the builder's unknown fields over.
    private UpdateFavoredNodesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance.
    private UpdateFavoredNodesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in this class's static initializer.
    private static final UpdateFavoredNodesRequest defaultInstance;
    public static UpdateFavoredNodesRequest getDefaultInstance() {
      return defaultInstance;
    }

    public UpdateFavoredNodesRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until EOF (tag 0),
    // accumulating repeated update_info entries and preserving any
    // unrecognized fields.
    private UpdateFavoredNodesRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 10 = field 1 (update_info), wire type 2 (length-delimited).
            case 10: {
              // Lazily allocate the backing list on the first occurrence;
              // mutable_bitField0_ bit 0 tracks whether it was allocated.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                updateInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo>();
                mutable_bitField0_ |= 0x00000001;
              }
              updateInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the message even on failure so the partially-parsed instance
        // attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          updateInfo_ = java.util.Collections.unmodifiableList(updateInfo_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.Builder.class);
    }

    // Shared parser instance; all static parseFrom helpers delegate here.
    public static com.google.protobuf.Parser<UpdateFavoredNodesRequest> PARSER =
        new com.google.protobuf.AbstractParser<UpdateFavoredNodesRequest>() {
      public UpdateFavoredNodesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new UpdateFavoredNodesRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<UpdateFavoredNodesRequest> getParserForType() {
      return PARSER;
    }
12637 
    /**
     * Accessor contract shared by {@code RegionUpdateInfo} and its Builder:
     * a required {@code region} and the repeated {@code favored_nodes}.
     */
    public interface RegionUpdateInfoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required .hbase.pb.RegionInfo region = 1;
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      boolean hasRegion();
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion();
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder();

      // repeated .hbase.pb.ServerName favored_nodes = 2;
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> 
          getFavoredNodesList();
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index);
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      int getFavoredNodesCount();
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getFavoredNodesOrBuilderList();
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder(
          int index);
    }
12680     /**
12681      * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo}
12682      */
12683     public static final class RegionUpdateInfo extends
12684         com.google.protobuf.GeneratedMessage
12685         implements RegionUpdateInfoOrBuilder {
12686       // Use RegionUpdateInfo.newBuilder() to construct.
      // Builder-based constructor; carries the builder's unknown fields over.
      private RegionUpdateInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // noInit constructor used only for the singleton default instance.
      private RegionUpdateInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance, created in this class's static initializer.
      private static final RegionUpdateInfo defaultInstance;
      public static RegionUpdateInfo getDefaultInstance() {
        return defaultInstance;
      }

      public RegionUpdateInfo getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Fields that arrived on the wire but are not in this message's schema.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor: reads tags until EOF (tag 0),
      // filling region (field 1) and favored_nodes (field 2) and preserving
      // any unrecognized fields.
      private RegionUpdateInfo(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              // tag 10 = field 1 (region), wire type 2 (length-delimited).
              case 10: {
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
                // If region was already seen, merge the new occurrence into
                // it (last-value-wins semantics for scalar sub-fields).
                if (((bitField0_ & 0x00000001) == 0x00000001)) {
                  subBuilder = region_.toBuilder();
                }
                region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(region_);
                  region_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000001;
                break;
              }
              // tag 18 = field 2 (favored_nodes), wire type 2.
              case 18: {
                // Lazily allocate the backing list on the first occurrence;
                // mutable_bitField0_ bit 1 tracks whether it was allocated.
                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                  favoredNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>();
                  mutable_bitField0_ |= 0x00000002;
                }
                favoredNodes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry));
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Seal the message even on failure so the partially-parsed
          // instance attached to the exception is immutable.
          if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder.class);
      }

      // Shared parser instance; all static parseFrom helpers delegate here.
      public static com.google.protobuf.Parser<RegionUpdateInfo> PARSER =
          new com.google.protobuf.AbstractParser<RegionUpdateInfo>() {
        public RegionUpdateInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new RegionUpdateInfo(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<RegionUpdateInfo> getParserForType() {
        return PARSER;
      }
12793 
      // Bit i set => optional/required field i+1 is present (has_* == true).
      private int bitField0_;
      // required .hbase.pb.RegionInfo region = 1;
      public static final int REGION_FIELD_NUMBER = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_;
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
        return region_;
      }
      /**
       * <code>required .hbase.pb.RegionInfo region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
        return region_;
      }

      // repeated .hbase.pb.ServerName favored_nodes = 2;
      public static final int FAVORED_NODES_FIELD_NUMBER = 2;
      // Unmodifiable after construction (sealed in the parsing constructor
      // or initialized to an empty list by initFields()).
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_;
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() {
        return favoredNodes_;
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getFavoredNodesOrBuilderList() {
        return favoredNodes_;
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      public int getFavoredNodesCount() {
        return favoredNodes_.size();
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index) {
        return favoredNodes_.get(index);
      }
      /**
       * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder(
          int index) {
        return favoredNodes_.get(index);
      }

      // Reset all fields to their proto defaults.
      private void initFields() {
        region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
        favoredNodes_ = java.util.Collections.emptyList();
      }
      // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // region is required, and both it and every favored_nodes entry
        // must themselves be fully initialized.
        if (!hasRegion()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (!getRegion().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
        for (int i = 0; i < getFavoredNodesCount(); i++) {
          if (!getFavoredNodes(i).isInitialized()) {
            memoizedIsInitialized = 0;
            return false;
          }
        }
        memoizedIsInitialized = 1;
        return true;
      }
12879 
      // Serializes the message in field-number order; present optional /
      // required fields first, then each repeated element.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Forces the size to be memoized before writing (required by the
        // CodedOutputStream contract for nested messages).
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeMessage(1, region_);
        }
        for (int i = 0; i < favoredNodes_.size(); i++) {
          output.writeMessage(2, favoredNodes_.get(i));
        }
        getUnknownFields().writeTo(output);
      }

      // Memoized wire size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, region_);
        }
        for (int i = 0; i < favoredNodes_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(2, favoredNodes_.get(i));
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      private static final long serialVersionUID = 0L;
      // Java serialization routes through the protobuf wire format
      // (GeneratedMessage.writeReplace) rather than default field
      // serialization.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
12917 
      // Field-wise equality: presence bits and values of region and
      // favored_nodes must match, as must the unknown-field sets.
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
          return true;
        }
        if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo)) {
          return super.equals(obj);
        }
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo) obj;

        boolean result = true;
        result = result && (hasRegion() == other.hasRegion());
        if (hasRegion()) {
          result = result && getRegion()
              .equals(other.getRegion());
        }
        result = result && getFavoredNodesList()
            .equals(other.getFavoredNodesList());
        result = result &&
            getUnknownFields().equals(other.getUnknownFields());
        return result;
      }

      // Memoized hash of this message; 0 means "not yet computed".
      private int memoizedHashCode = 0;
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        // Standard protobuf hash: seed with the descriptor, then mix in each
        // present field tagged by its field number, then unknown fields.
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        if (hasRegion()) {
          hash = (37 * hash) + REGION_FIELD_NUMBER;
          hash = (53 * hash) + getRegion().hashCode();
        }
        if (getFavoredNodesCount() > 0) {
          hash = (37 * hash) + FAVORED_NODES_FIELD_NUMBER;
          hash = (53 * hash) + getFavoredNodesList().hashCode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
        memoizedHashCode = hash;
        return hash;
      }
12961 
12962       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(
12963           com.google.protobuf.ByteString data)
12964           throws com.google.protobuf.InvalidProtocolBufferException {
12965         return PARSER.parseFrom(data);
12966       }
12967       public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(
12968           com.google.protobuf.ByteString data,
12969           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12970           throws com.google.protobuf.InvalidProtocolBufferException {
12971         return PARSER.parseFrom(data, extensionRegistry);
12972       }
      // Static parse entry points for RegionUpdateInfo.  All overloads delegate to
      // the shared PARSER; byte[]/ByteString variants throw
      // InvalidProtocolBufferException on malformed input, stream variants throw
      // IOException.  "Delimited" variants expect a varint length prefix before
      // the message bytes.
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      // Reads one complete message from the stream (consumes to EOF).
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Length-prefixed variants, suitable for reading several messages from one stream.
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
13014 
      // Creates an empty builder for RegionUpdateInfo.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Creates a builder pre-populated with a copy of {@code prototype}'s fields.
      public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Called by the protobuf runtime so nested builders can notify their parent
      // of changes; not part of the public API.
      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo}
       *
       * <p>Compiler-generated builder pairing one required region with a repeated
       * list of favored-node server names.  Field presence is tracked in
       * {@code bitField0_}: bit 0 = {@code region}, bit 1 marks that
       * {@code favoredNodes_} is a locally mutable list.</p>
       */
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
         implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder.class);
        }

        // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates the nested field builders when the runtime flag
        // alwaysUseFieldBuilders is set; otherwise they are created lazily.
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getRegionFieldBuilder();
            getFavoredNodesFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }

        // Resets both fields to their defaults and clears the presence bits.
        public Builder clear() {
          super.clear();
          if (regionBuilder_ == null) {
            region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
          } else {
            regionBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000001);
          if (favoredNodesBuilder_ == null) {
            favoredNodes_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            favoredNodesBuilder_.clear();
          }
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor;
        }

        public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.getDefaultInstance();
        }

        // Builds and verifies that all required fields (region, transitively) are set.
        public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo build() {
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Builds without the required-field check; copies presence bit 0 into the
        // result and freezes favoredNodes_ (wrapped unmodifiable, bit 1 cleared)
        // so the built message and this builder no longer share a mutable list.
        public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo buildPartial() {
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          if (regionBuilder_ == null) {
            result.region_ = region_;
          } else {
            result.region_ = regionBuilder_.build();
          }
          if (favoredNodesBuilder_ == null) {
            if (((bitField0_ & 0x00000002) == 0x00000002)) {
              favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_);
              bitField0_ = (bitField0_ & ~0x00000002);
            }
            result.favoredNodes_ = favoredNodes_;
          } else {
            result.favoredNodes_ = favoredNodesBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo) {
            return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Merges another RegionUpdateInfo into this builder.  When this builder's
        // list is empty it adopts the other's (immutable) list by reference and
        // clears bit 1, deferring any copy until a local mutation occurs.
        public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo other) {
          if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.getDefaultInstance()) return this;
          if (other.hasRegion()) {
            mergeRegion(other.getRegion());
          }
          if (favoredNodesBuilder_ == null) {
            if (!other.favoredNodes_.isEmpty()) {
              if (favoredNodes_.isEmpty()) {
                favoredNodes_ = other.favoredNodes_;
                bitField0_ = (bitField0_ & ~0x00000002);
              } else {
                ensureFavoredNodesIsMutable();
                favoredNodes_.addAll(other.favoredNodes_);
              }
              onChanged();
            }
          } else {
            if (!other.favoredNodesBuilder_.isEmpty() == false && favoredNodesBuilder_.isEmpty()) {
              favoredNodesBuilder_.dispose();
              favoredNodesBuilder_ = null;
              favoredNodes_ = other.favoredNodes_;
              bitField0_ = (bitField0_ & ~0x00000002);
              favoredNodesBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getFavoredNodesFieldBuilder() : null;
            } else {
              favoredNodesBuilder_.addAllMessages(other.favoredNodes_);
            }
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          if (!hasRegion()) {
            // required field region is unset
            return false;
          }
          if (!getRegion().isInitialized()) {
            // region is missing one of its own required fields
            return false;
          }
          for (int i = 0; i < getFavoredNodesCount(); i++) {
            if (!getFavoredNodes(i).isInitialized()) {
              // a favored_nodes entry is missing a required field
              return false;
            }
          }
          return true;
        }

        // Parses from a stream, merging whatever was successfully read even when
        // an InvalidProtocolBufferException is rethrown (partial-merge contract).
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence/mutability bits: 0x1 = region set, 0x2 = favoredNodes_ mutable.
        private int bitField0_;

        // required .hbase.pb.RegionInfo region = 1;
        private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_;
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public boolean hasRegion() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
          if (regionBuilder_ == null) {
            return region_;
          } else {
            return regionBuilder_.getMessage();
          }
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
          if (regionBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            region_ = value;
            onChanged();
          } else {
            regionBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000001;
          return this;
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public Builder setRegion(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
          if (regionBuilder_ == null) {
            region_ = builderForValue.build();
            onChanged();
          } else {
            regionBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000001;
          return this;
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         *
         * Field-merges {@code value} into an already-set region; otherwise
         * behaves like setRegion.
         */
        public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
          if (regionBuilder_ == null) {
            if (((bitField0_ & 0x00000001) == 0x00000001) &&
                region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
              region_ =
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial();
            } else {
              region_ = value;
            }
            onChanged();
          } else {
            regionBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000001;
          return this;
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public Builder clearRegion() {
          if (regionBuilder_ == null) {
            region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
            onChanged();
          } else {
            regionBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000001);
          return this;
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() {
          bitField0_ |= 0x00000001;
          onChanged();
          return getRegionFieldBuilder().getBuilder();
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
          if (regionBuilder_ != null) {
            return regionBuilder_.getMessageOrBuilder();
          } else {
            return region_;
          }
        }
        /**
         * <code>required .hbase.pb.RegionInfo region = 1;</code>
         *
         * Lazily creates the nested builder; once created, region_ is nulled
         * and the builder becomes the single source of truth for the field.
         */
        private com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
            getRegionFieldBuilder() {
          if (regionBuilder_ == null) {
            regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
                    region_,
                    getParentForChildren(),
                    isClean());
            region_ = null;
          }
          return regionBuilder_;
        }

        // repeated .hbase.pb.ServerName favored_nodes = 2;
        private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_ =
          java.util.Collections.emptyList();
        // Copies favoredNodes_ into a fresh ArrayList before the first local
        // mutation (the shared/immutable list may have been adopted by reference).
        private void ensureFavoredNodesIsMutable() {
          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
            favoredNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(favoredNodes_);
            bitField0_ |= 0x00000002;
           }
        }

        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodesBuilder_;

        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() {
          if (favoredNodesBuilder_ == null) {
            return java.util.Collections.unmodifiableList(favoredNodes_);
          } else {
            return favoredNodesBuilder_.getMessageList();
          }
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public int getFavoredNodesCount() {
          if (favoredNodesBuilder_ == null) {
            return favoredNodes_.size();
          } else {
            return favoredNodesBuilder_.getCount();
          }
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index) {
          if (favoredNodesBuilder_ == null) {
            return favoredNodes_.get(index);
          } else {
            return favoredNodesBuilder_.getMessage(index);
          }
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder setFavoredNodes(
            int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
          if (favoredNodesBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureFavoredNodesIsMutable();
            favoredNodes_.set(index, value);
            onChanged();
          } else {
            favoredNodesBuilder_.setMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder setFavoredNodes(
            int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
          if (favoredNodesBuilder_ == null) {
            ensureFavoredNodesIsMutable();
            favoredNodes_.set(index, builderForValue.build());
            onChanged();
          } else {
            favoredNodesBuilder_.setMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder addFavoredNodes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
          if (favoredNodesBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureFavoredNodesIsMutable();
            favoredNodes_.add(value);
            onChanged();
          } else {
            favoredNodesBuilder_.addMessage(value);
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder addFavoredNodes(
            int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
          if (favoredNodesBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureFavoredNodesIsMutable();
            favoredNodes_.add(index, value);
            onChanged();
          } else {
            favoredNodesBuilder_.addMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder addFavoredNodes(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
          if (favoredNodesBuilder_ == null) {
            ensureFavoredNodesIsMutable();
            favoredNodes_.add(builderForValue.build());
            onChanged();
          } else {
            favoredNodesBuilder_.addMessage(builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder addFavoredNodes(
            int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
          if (favoredNodesBuilder_ == null) {
            ensureFavoredNodesIsMutable();
            favoredNodes_.add(index, builderForValue.build());
            onChanged();
          } else {
            favoredNodesBuilder_.addMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder addAllFavoredNodes(
            java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) {
          if (favoredNodesBuilder_ == null) {
            ensureFavoredNodesIsMutable();
            super.addAll(values, favoredNodes_);
            onChanged();
          } else {
            favoredNodesBuilder_.addAllMessages(values);
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder clearFavoredNodes() {
          if (favoredNodesBuilder_ == null) {
            favoredNodes_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000002);
            onChanged();
          } else {
            favoredNodesBuilder_.clear();
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public Builder removeFavoredNodes(int index) {
          if (favoredNodesBuilder_ == null) {
            ensureFavoredNodesIsMutable();
            favoredNodes_.remove(index);
            onChanged();
          } else {
            favoredNodesBuilder_.remove(index);
          }
          return this;
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getFavoredNodesBuilder(
            int index) {
          return getFavoredNodesFieldBuilder().getBuilder(index);
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder(
            int index) {
          if (favoredNodesBuilder_ == null) {
            return favoredNodes_.get(index);  } else {
            return favoredNodesBuilder_.getMessageOrBuilder(index);
          }
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
             getFavoredNodesOrBuilderList() {
          if (favoredNodesBuilder_ != null) {
            return favoredNodesBuilder_.getMessageOrBuilderList();
          } else {
            return java.util.Collections.unmodifiableList(favoredNodes_);
          }
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodesBuilder() {
          return getFavoredNodesFieldBuilder().addBuilder(
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodesBuilder(
            int index) {
          return getFavoredNodesFieldBuilder().addBuilder(
              index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
        }
        /**
         * <code>repeated .hbase.pb.ServerName favored_nodes = 2;</code>
         */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> 
             getFavoredNodesBuilderList() {
          return getFavoredNodesFieldBuilder().getBuilderList();
        }
        // Lazily creates the repeated-field builder; once created, favoredNodes_
        // is nulled and the builder owns the list.
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
            getFavoredNodesFieldBuilder() {
          if (favoredNodesBuilder_ == null) {
            favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                    favoredNodes_,
                    ((bitField0_ & 0x00000002) == 0x00000002),
                    getParentForChildren(),
                    isClean());
            favoredNodes_ = null;
          }
          return favoredNodesBuilder_;
        }

        // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo)
      }
13570 
      static {
        // Build the singleton default instance with all fields at their defaults.
        defaultInstance = new RegionUpdateInfo(true);
        defaultInstance.initFields();
      }
13575 
13576       // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo)
13577     }
13578 
    // repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;
    public static final int UPDATE_INFO_FIELD_NUMBER = 1;
    // Backing list; Collections.emptyList() when unset, otherwise populated once
    // at construction and never mutated afterwards.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo> updateInfo_;
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     *
     * Returns the internal list directly; callers must not modify it.
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo> getUpdateInfoList() {
      return updateInfo_;
    }
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> 
        getUpdateInfoOrBuilderList() {
      return updateInfo_;
    }
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    public int getUpdateInfoCount() {
      return updateInfo_.size();
    }
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo getUpdateInfo(int index) {
      return updateInfo_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder getUpdateInfoOrBuilder(
        int index) {
      return updateInfo_.get(index);
    }
13614 
    // Sets every field to its proto default (empty repeated list).
    private void initFields() {
      updateInfo_ = java.util.Collections.emptyList();
    }
13618     private byte memoizedIsInitialized = -1;
13619     public final boolean isInitialized() {
13620       byte isInitialized = memoizedIsInitialized;
13621       if (isInitialized != -1) return isInitialized == 1;
13622 
13623       for (int i = 0; i < getUpdateInfoCount(); i++) {
13624         if (!getUpdateInfo(i).isInitialized()) {
13625           memoizedIsInitialized = 0;
13626           return false;
13627         }
13628       }
13629       memoizedIsInitialized = 1;
13630       return true;
13631     }
13632 
13633     public void writeTo(com.google.protobuf.CodedOutputStream output)
13634                         throws java.io.IOException {
13635       getSerializedSize();
13636       for (int i = 0; i < updateInfo_.size(); i++) {
13637         output.writeMessage(1, updateInfo_.get(i));
13638       }
13639       getUnknownFields().writeTo(output);
13640     }
13641 
13642     private int memoizedSerializedSize = -1;
13643     public int getSerializedSize() {
13644       int size = memoizedSerializedSize;
13645       if (size != -1) return size;
13646 
13647       size = 0;
13648       for (int i = 0; i < updateInfo_.size(); i++) {
13649         size += com.google.protobuf.CodedOutputStream
13650           .computeMessageSize(1, updateInfo_.get(i));
13651       }
13652       size += getUnknownFields().getSerializedSize();
13653       memoizedSerializedSize = size;
13654       return size;
13655     }
13656 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's writeReplace so
    // the message is serialized via protobuf rather than default Java fields.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
13663 
    // Value equality: equal update_info lists and equal unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest) obj;

      boolean result = true;
      result = result && getUpdateInfoList()
          .equals(other.getUpdateInfoList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
13681 
    // Cached hash; 0 is the "not yet computed" sentinel, so a message
    // whose real hash happens to be 0 would be recomputed on every call
    // (harmless, just redundant work).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mixes the descriptor, the field number and list hash of any
      // present update_info elements, and the unknown fields —
      // consistent with equals() above.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getUpdateInfoCount() > 0) {
        hash = (37 * hash) + UPDATE_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getUpdateInfoList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
13698 
    // Static parsing entry points for every supported input form
    // (ByteString, byte[], InputStream, CodedInputStream, with or
    // without an extension registry).  All delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // The *Delimited* variants delegate to Parser.parseDelimitedFrom,
    // which handles the stream's length-delimited framing.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
13751 
    // Builder factories.  newBuilder(prototype) seeds the new builder by
    // merging the prototype's fields; toBuilder() does the same for this
    // instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
13758 
    // Creates a Builder attached to 'parent' — presumably used by the
    // GeneratedMessage runtime to propagate change notifications for
    // nested builders (see BuilderParent); the parent is passed straight
    // to the Builder constructor.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
13765     /**
13766      * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest}
13767      */
13768     public static final class Builder extends
13769         com.google.protobuf.GeneratedMessage.Builder<Builder>
13770        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequestOrBuilder {
13771       public static final com.google.protobuf.Descriptors.Descriptor
13772           getDescriptor() {
13773         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor;
13774       }
13775 
13776       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13777           internalGetFieldAccessorTable() {
13778         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable
13779             .ensureFieldAccessorsInitialized(
13780                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.Builder.class);
13781       }
13782 
13783       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.newBuilder()
13784       private Builder() {
13785         maybeForceBuilderInitialization();
13786       }
13787 
13788       private Builder(
13789           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13790         super(parent);
13791         maybeForceBuilderInitialization();
13792       }
13793       private void maybeForceBuilderInitialization() {
13794         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13795           getUpdateInfoFieldBuilder();
13796         }
13797       }
13798       private static Builder create() {
13799         return new Builder();
13800       }
13801 
13802       public Builder clear() {
13803         super.clear();
13804         if (updateInfoBuilder_ == null) {
13805           updateInfo_ = java.util.Collections.emptyList();
13806           bitField0_ = (bitField0_ & ~0x00000001);
13807         } else {
13808           updateInfoBuilder_.clear();
13809         }
13810         return this;
13811       }
13812 
13813       public Builder clone() {
13814         return create().mergeFrom(buildPartial());
13815       }
13816 
13817       public com.google.protobuf.Descriptors.Descriptor
13818           getDescriptorForType() {
13819         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor;
13820       }
13821 
13822       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest getDefaultInstanceForType() {
13823         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.getDefaultInstance();
13824       }
13825 
13826       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest build() {
13827         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest result = buildPartial();
13828         if (!result.isInitialized()) {
13829           throw newUninitializedMessageException(result);
13830         }
13831         return result;
13832       }
13833 
13834       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest buildPartial() {
13835         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest(this);
13836         int from_bitField0_ = bitField0_;
13837         if (updateInfoBuilder_ == null) {
13838           if (((bitField0_ & 0x00000001) == 0x00000001)) {
13839             updateInfo_ = java.util.Collections.unmodifiableList(updateInfo_);
13840             bitField0_ = (bitField0_ & ~0x00000001);
13841           }
13842           result.updateInfo_ = updateInfo_;
13843         } else {
13844           result.updateInfo_ = updateInfoBuilder_.build();
13845         }
13846         onBuilt();
13847         return result;
13848       }
13849 
13850       public Builder mergeFrom(com.google.protobuf.Message other) {
13851         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest) {
13852           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest)other);
13853         } else {
13854           super.mergeFrom(other);
13855           return this;
13856         }
13857       }
13858 
13859       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest other) {
13860         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.getDefaultInstance()) return this;
13861         if (updateInfoBuilder_ == null) {
13862           if (!other.updateInfo_.isEmpty()) {
13863             if (updateInfo_.isEmpty()) {
13864               updateInfo_ = other.updateInfo_;
13865               bitField0_ = (bitField0_ & ~0x00000001);
13866             } else {
13867               ensureUpdateInfoIsMutable();
13868               updateInfo_.addAll(other.updateInfo_);
13869             }
13870             onChanged();
13871           }
13872         } else {
13873           if (!other.updateInfo_.isEmpty()) {
13874             if (updateInfoBuilder_.isEmpty()) {
13875               updateInfoBuilder_.dispose();
13876               updateInfoBuilder_ = null;
13877               updateInfo_ = other.updateInfo_;
13878               bitField0_ = (bitField0_ & ~0x00000001);
13879               updateInfoBuilder_ = 
13880                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
13881                    getUpdateInfoFieldBuilder() : null;
13882             } else {
13883               updateInfoBuilder_.addAllMessages(other.updateInfo_);
13884             }
13885           }
13886         }
13887         this.mergeUnknownFields(other.getUnknownFields());
13888         return this;
13889       }
13890 
13891       public final boolean isInitialized() {
13892         for (int i = 0; i < getUpdateInfoCount(); i++) {
13893           if (!getUpdateInfo(i).isInitialized()) {
13894             
13895             return false;
13896           }
13897         }
13898         return true;
13899       }
13900 
13901       public Builder mergeFrom(
13902           com.google.protobuf.CodedInputStream input,
13903           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13904           throws java.io.IOException {
13905         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parsedMessage = null;
13906         try {
13907           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13908         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13909           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest) e.getUnfinishedMessage();
13910           throw e;
13911         } finally {
13912           if (parsedMessage != null) {
13913             mergeFrom(parsedMessage);
13914           }
13915         }
13916         return this;
13917       }
13918       private int bitField0_;
13919 
13920       // repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;
13921       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo> updateInfo_ =
13922         java.util.Collections.emptyList();
13923       private void ensureUpdateInfoIsMutable() {
13924         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
13925           updateInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo>(updateInfo_);
13926           bitField0_ |= 0x00000001;
13927          }
13928       }
13929 
13930       private com.google.protobuf.RepeatedFieldBuilder<
13931           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> updateInfoBuilder_;
13932 
13933       /**
13934        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
13935        */
13936       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo> getUpdateInfoList() {
13937         if (updateInfoBuilder_ == null) {
13938           return java.util.Collections.unmodifiableList(updateInfo_);
13939         } else {
13940           return updateInfoBuilder_.getMessageList();
13941         }
13942       }
13943       /**
13944        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
13945        */
13946       public int getUpdateInfoCount() {
13947         if (updateInfoBuilder_ == null) {
13948           return updateInfo_.size();
13949         } else {
13950           return updateInfoBuilder_.getCount();
13951         }
13952       }
13953       /**
13954        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
13955        */
13956       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo getUpdateInfo(int index) {
13957         if (updateInfoBuilder_ == null) {
13958           return updateInfo_.get(index);
13959         } else {
13960           return updateInfoBuilder_.getMessage(index);
13961         }
13962       }
13963       /**
13964        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
13965        */
13966       public Builder setUpdateInfo(
13967           int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo value) {
13968         if (updateInfoBuilder_ == null) {
13969           if (value == null) {
13970             throw new NullPointerException();
13971           }
13972           ensureUpdateInfoIsMutable();
13973           updateInfo_.set(index, value);
13974           onChanged();
13975         } else {
13976           updateInfoBuilder_.setMessage(index, value);
13977         }
13978         return this;
13979       }
13980       /**
13981        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
13982        */
13983       public Builder setUpdateInfo(
13984           int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder builderForValue) {
13985         if (updateInfoBuilder_ == null) {
13986           ensureUpdateInfoIsMutable();
13987           updateInfo_.set(index, builderForValue.build());
13988           onChanged();
13989         } else {
13990           updateInfoBuilder_.setMessage(index, builderForValue.build());
13991         }
13992         return this;
13993       }
13994       /**
13995        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
13996        */
13997       public Builder addUpdateInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo value) {
13998         if (updateInfoBuilder_ == null) {
13999           if (value == null) {
14000             throw new NullPointerException();
14001           }
14002           ensureUpdateInfoIsMutable();
14003           updateInfo_.add(value);
14004           onChanged();
14005         } else {
14006           updateInfoBuilder_.addMessage(value);
14007         }
14008         return this;
14009       }
14010       /**
14011        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14012        */
14013       public Builder addUpdateInfo(
14014           int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo value) {
14015         if (updateInfoBuilder_ == null) {
14016           if (value == null) {
14017             throw new NullPointerException();
14018           }
14019           ensureUpdateInfoIsMutable();
14020           updateInfo_.add(index, value);
14021           onChanged();
14022         } else {
14023           updateInfoBuilder_.addMessage(index, value);
14024         }
14025         return this;
14026       }
14027       /**
14028        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14029        */
14030       public Builder addUpdateInfo(
14031           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder builderForValue) {
14032         if (updateInfoBuilder_ == null) {
14033           ensureUpdateInfoIsMutable();
14034           updateInfo_.add(builderForValue.build());
14035           onChanged();
14036         } else {
14037           updateInfoBuilder_.addMessage(builderForValue.build());
14038         }
14039         return this;
14040       }
14041       /**
14042        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14043        */
14044       public Builder addUpdateInfo(
14045           int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder builderForValue) {
14046         if (updateInfoBuilder_ == null) {
14047           ensureUpdateInfoIsMutable();
14048           updateInfo_.add(index, builderForValue.build());
14049           onChanged();
14050         } else {
14051           updateInfoBuilder_.addMessage(index, builderForValue.build());
14052         }
14053         return this;
14054       }
14055       /**
14056        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14057        */
14058       public Builder addAllUpdateInfo(
14059           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo> values) {
14060         if (updateInfoBuilder_ == null) {
14061           ensureUpdateInfoIsMutable();
14062           super.addAll(values, updateInfo_);
14063           onChanged();
14064         } else {
14065           updateInfoBuilder_.addAllMessages(values);
14066         }
14067         return this;
14068       }
14069       /**
14070        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14071        */
14072       public Builder clearUpdateInfo() {
14073         if (updateInfoBuilder_ == null) {
14074           updateInfo_ = java.util.Collections.emptyList();
14075           bitField0_ = (bitField0_ & ~0x00000001);
14076           onChanged();
14077         } else {
14078           updateInfoBuilder_.clear();
14079         }
14080         return this;
14081       }
14082       /**
14083        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14084        */
14085       public Builder removeUpdateInfo(int index) {
14086         if (updateInfoBuilder_ == null) {
14087           ensureUpdateInfoIsMutable();
14088           updateInfo_.remove(index);
14089           onChanged();
14090         } else {
14091           updateInfoBuilder_.remove(index);
14092         }
14093         return this;
14094       }
14095       /**
14096        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14097        */
14098       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder getUpdateInfoBuilder(
14099           int index) {
14100         return getUpdateInfoFieldBuilder().getBuilder(index);
14101       }
14102       /**
14103        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14104        */
14105       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder getUpdateInfoOrBuilder(
14106           int index) {
14107         if (updateInfoBuilder_ == null) {
14108           return updateInfo_.get(index);  } else {
14109           return updateInfoBuilder_.getMessageOrBuilder(index);
14110         }
14111       }
14112       /**
14113        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14114        */
14115       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> 
14116            getUpdateInfoOrBuilderList() {
14117         if (updateInfoBuilder_ != null) {
14118           return updateInfoBuilder_.getMessageOrBuilderList();
14119         } else {
14120           return java.util.Collections.unmodifiableList(updateInfo_);
14121         }
14122       }
14123       /**
14124        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14125        */
14126       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder addUpdateInfoBuilder() {
14127         return getUpdateInfoFieldBuilder().addBuilder(
14128             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.getDefaultInstance());
14129       }
14130       /**
14131        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14132        */
14133       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder addUpdateInfoBuilder(
14134           int index) {
14135         return getUpdateInfoFieldBuilder().addBuilder(
14136             index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.getDefaultInstance());
14137       }
14138       /**
14139        * <code>repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1;</code>
14140        */
14141       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder> 
14142            getUpdateInfoBuilderList() {
14143         return getUpdateInfoFieldBuilder().getBuilderList();
14144       }
14145       private com.google.protobuf.RepeatedFieldBuilder<
14146           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> 
14147           getUpdateInfoFieldBuilder() {
14148         if (updateInfoBuilder_ == null) {
14149           updateInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
14150               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder>(
14151                   updateInfo_,
14152                   ((bitField0_ & 0x00000001) == 0x00000001),
14153                   getParentForChildren(),
14154                   isClean());
14155           updateInfo_ = null;
14156         }
14157         return updateInfoBuilder_;
14158       }
14159 
14160       // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateFavoredNodesRequest)
14161     }
14162 
    // Builds the singleton default instance via the no-parse constructor,
    // then resets its fields to proto defaults.
    static {
      defaultInstance = new UpdateFavoredNodesRequest(true);
      defaultInstance.initFields();
    }
14167 
14168     // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesRequest)
14169   }
14170 
  // Read-only accessor interface implemented by both the immutable
  // UpdateFavoredNodesResponse message and its Builder.
  public interface UpdateFavoredNodesResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 response = 1;
    /**
     * <code>optional uint32 response = 1;</code>
     *
     * Whether the {@code response} field has been explicitly set.
     */
    boolean hasResponse();
    /**
     * <code>optional uint32 response = 1;</code>
     *
     * The field value, or the uint32 default (0) when unset.
     */
    int getResponse();
  }
14184   /**
14185    * Protobuf type {@code hbase.pb.UpdateFavoredNodesResponse}
14186    */
14187   public static final class UpdateFavoredNodesResponse extends
14188       com.google.protobuf.GeneratedMessage
14189       implements UpdateFavoredNodesResponseOrBuilder {
    // Use UpdateFavoredNodesResponse.newBuilder() to construct.
    private UpdateFavoredNodesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Overload used for the default instance; 'noInit' only distinguishes
    // the signature, its value is ignored.
    private UpdateFavoredNodesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14196 
    // Singleton default instance — presumably assigned in this class's
    // static initializer, mirroring UpdateFavoredNodesRequest above.
    private static final UpdateFavoredNodesResponse defaultInstance;
    public static UpdateFavoredNodesResponse getDefaultInstance() {
      return defaultInstance;
    }

    public UpdateFavoredNodesResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
14205 
    // Fields present on the wire that this schema version does not know;
    // preserved so they round-trip through reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a serialized message from 'input'.  Unrecognized fields are
    // preserved in unknownFields; on failure the partially-read message
    // is attached to the thrown exception via setUnfinishedMessage.
    private UpdateFavoredNodesResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            // NOTE: protoc emits 'default' ahead of the field cases;
            // case order is irrelevant in a Java switch and every case
            // ends with break, so there is no fall-through.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 ('response'), wire type 0 (varint): tag = (1<<3)|0.
              bitField0_ |= 0x00000001;
              response_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs on success and failure alike: keep whatever unknown
        // fields were collected before any error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor/reflection plumbing: links this class to the descriptor
    // and field-accessor table declared at the bottom of AdminProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.Builder.class);
    }
14263 
    // Shared parser; each parsePartialFrom call runs the parsing
    // constructor above.
    // NOTE(review): PARSER is a non-final public static field as emitted
    // by this protoc version — it is technically reassignable, but this
    // generated file must not be modified by hand.
    public static com.google.protobuf.Parser<UpdateFavoredNodesResponse> PARSER =
        new com.google.protobuf.AbstractParser<UpdateFavoredNodesResponse>() {
      public UpdateFavoredNodesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new UpdateFavoredNodesResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<UpdateFavoredNodesResponse> getParserForType() {
      return PARSER;
    }
14278 
    // Presence bits for optional fields; bit 0x00000001 tracks 'response'.
    private int bitField0_;
    // optional uint32 response = 1;
    public static final int RESPONSE_FIELD_NUMBER = 1;
    private int response_;
    /**
     * <code>optional uint32 response = 1;</code>
     *
     * True iff 'response' was explicitly set (presence bit is on).
     */
    public boolean hasResponse() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 response = 1;</code>
     *
     * Returns the stored value; 0 (the uint32 default) when unset.
     */
    public int getResponse() {
      return response_;
    }
14295 
    // Resets fields to proto defaults (uint32 default is 0).
    private void initFields() {
      response_ = 0;
    }
    // Memoized initialization check; this message has no required
    // fields, so it is unconditionally initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
14307 
    // Serializes 'response' as field 1 (only when present), then any
    // preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of memoizing the serialized size.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, response_);
      }
      getUnknownFields().writeTo(output);
    }
14316 
    // Cached wire size; -1 = not yet computed.  Benign unsynchronized
    // publication — the message is immutable, so all threads compute the
    // same value.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, response_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
14331 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the GeneratedMessage superclass
    // implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
14338 
    // Value equality: two messages are equal when field presence, field
    // values, and unknown fields all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse)) {
        // Not our type: fall back to the superclass definition of equality.
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse) obj;

      boolean result = true;
      result = result && (hasResponse() == other.hasResponse());
      if (hasResponse()) {
        result = result && (getResponse()
            == other.getResponse());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
14359 
    // Memoized hash; 0 means not yet computed (recomputed harmlessly if the
    // true hash happens to be 0).
    private int memoizedHashCode = 0;
    // Hash code consistent with equals(): mixes the descriptor, each set
    // field (tagged by its field number), and the unknown fields.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasResponse()) {
        hash = (37 * hash) + RESPONSE_FIELD_NUMBER;
        hash = (53 * hash) + getResponse();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
14376 
    // Static parse helpers: each overload delegates to PARSER to decode an
    // UpdateFavoredNodesResponse from the given source (ByteString, byte[],
    // InputStream, or CodedInputStream), optionally with an extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
14429 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new builder pre-populated from the given prototype.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to the given parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.UpdateFavoredNodesResponse}
     *
     * <p>Mutable builder for {@code UpdateFavoredNodesResponse}. Not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponseOrBuilder {
      // Descriptor for message type hbase.pb.UpdateFavoredNodesResponse.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor;
      }

      // Table mapping descriptor fields to the generated accessors, used by
      // the runtime's reflective access paths.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so there are no nested field builders
      // to force-initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all fields to their defaults and clears all presence bits.
      public Builder clear() {
        super.clear();
        response_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance();
      }

      // Builds the message, throwing if required fields are missing
      // (this message has none, so it cannot actually throw here).
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state (value + presence bit) into a new message
      // without validating required fields.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.response_ = response_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: copies only fields that are set on 'other', plus its
      // unknown fields.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance()) return this;
        if (other.hasResponse()) {
          setResponse(other.getResponse());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // No required fields, so a builder is always initialized.
      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream and merges the result into this builder; on
      // parse failure, any partially-parsed message is still merged before
      // the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask for the builder's fields (bit 0 = 'response').
      private int bitField0_;

      // optional uint32 response = 1;
      private int response_ ;
      /**
       * <code>optional uint32 response = 1;</code>
       *
       * @return whether the {@code response} field has been set
       */
      public boolean hasResponse() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 response = 1;</code>
       *
       * @return the field value; 0 (the proto default) when unset
       */
      public int getResponse() {
        return response_;
      }
      /**
       * <code>optional uint32 response = 1;</code>
       *
       * <p>Sets the field value and marks it present.
       */
      public Builder setResponse(int value) {
        bitField0_ |= 0x00000001;
        response_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 response = 1;</code>
       *
       * <p>Clears the presence bit and restores the proto default.
       */
      public Builder clearResponse() {
        bitField0_ = (bitField0_ & ~0x00000001);
        response_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateFavoredNodesResponse)
    }
14596 
    // Eagerly create and initialize the shared default instance.
    static {
      defaultInstance = new UpdateFavoredNodesResponse(true);
      defaultInstance.initFields();
    }
14601 
14602     // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesResponse)
14603   }
14604 
  /**
   * Read-only accessor interface for {@code hbase.pb.MergeRegionsRequest},
   * implemented by both the generated message and its builder.
   */
  public interface MergeRegionsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionSpecifier region_a = 1;
    /**
     * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
     *
     * @return whether the required {@code region_a} field has been set
     */
    boolean hasRegionA();
    /**
     * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA();
    /**
     * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder();

    // required .hbase.pb.RegionSpecifier region_b = 2;
    /**
     * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
     *
     * @return whether the required {@code region_b} field has been set
     */
    boolean hasRegionB();
    /**
     * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB();
    /**
     * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder();

    // optional bool forcible = 3 [default = false];
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     */
    boolean hasForcible();
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     */
    boolean getForcible();

    // optional uint64 master_system_time = 4;
    /**
     * <code>optional uint64 master_system_time = 4;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     */
    boolean hasMasterSystemTime();
    /**
     * <code>optional uint64 master_system_time = 4;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     */
    long getMasterSystemTime();
  }
14664   /**
14665    * Protobuf type {@code hbase.pb.MergeRegionsRequest}
14666    *
14667    * <pre>
14668    **
14669    * Merges the specified regions.
14670    * &lt;p&gt;
14671    * This method currently closes the regions and then merges them
14672    * </pre>
14673    */
14674   public static final class MergeRegionsRequest extends
14675       com.google.protobuf.GeneratedMessage
14676       implements MergeRegionsRequestOrBuilder {
    // Use MergeRegionsRequest.newBuilder() to construct.
    private MergeRegionsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance; uses an empty unknown-field set.
    private MergeRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14683 
    // Shared immutable default instance, created in the static initializer.
    private static final MergeRegionsRequest defaultInstance;
    public static MergeRegionsRequest getDefaultInstance() {
      return defaultInstance;
    }

    public MergeRegionsRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
14692 
    // Fields present on the wire that this message's schema does not know
    // about; preserved so round-tripping does not lose data.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a MergeRegionsRequest directly from the wire. Unrecognized tags
    // are preserved in unknownFields; a message field seen more than once is
    // merged into the previously-read value via its builder.
    private MergeRegionsRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream.
              done = true;
              break;
            default: {
              // Lexically first but only taken when no labeled case matches:
              // keep the unknown field, or stop on an end-group tag.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 (region_a), wire type 2
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = regionA_.toBuilder();
              }
              regionA_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                // Field repeated on the wire: merge new payload into the old value.
                subBuilder.mergeFrom(regionA_);
                regionA_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {  // field 2 (region_b), wire type 2
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = regionB_.toBuilder();
              }
              regionB_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(regionB_);
                regionB_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 24: {  // field 3 (forcible), wire type 0
              bitField0_ |= 0x00000004;
              forcible_ = input.readBool();
              break;
            }
            case 32: {  // field 4 (master_system_time), wire type 0
              bitField0_ |= 0x00000008;
              masterSystemTime_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Record whatever was parsed, even when an exception is propagating.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for message type hbase.pb.MergeRegionsRequest.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_descriptor;
    }

    // Table mapping descriptor fields to the generated accessors, used by
    // the runtime's reflective access paths.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.Builder.class);
    }
14781 
    // Wire-format parser for MergeRegionsRequest.
    // NOTE(review): declared public static but not final by the generator;
    // left as-is since this file must match protoc output.
    public static com.google.protobuf.Parser<MergeRegionsRequest> PARSER =
        new com.google.protobuf.AbstractParser<MergeRegionsRequest>() {
      public MergeRegionsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MergeRegionsRequest(input, extensionRegistry);
      }
    };

    // Returns the singleton parser used by the protobuf runtime.
    @java.lang.Override
    public com.google.protobuf.Parser<MergeRegionsRequest> getParserForType() {
      return PARSER;
    }
14796 
    // Presence bitmask: bit 0 = region_a, bit 1 = region_b,
    // bit 2 = forcible, bit 3 = master_system_time.
    private int bitField0_;
    // required .hbase.pb.RegionSpecifier region_a = 1;
    public static final int REGION_A_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
     *
     * @return whether the required {@code region_a} field has been set
     */
    public boolean hasRegionA() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
     *
     * @return the field value; the default RegionSpecifier instance when unset
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() {
      return regionA_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() {
      return regionA_;
    }
14819 
    // required .hbase.pb.RegionSpecifier region_b = 2;
    public static final int REGION_B_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
     *
     * @return whether the required {@code region_b} field has been set
     */
    public boolean hasRegionB() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
     *
     * @return the field value; the default RegionSpecifier instance when unset
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() {
      return regionB_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() {
      return regionB_;
    }
14841 
    // optional bool forcible = 3 [default = false];
    public static final int FORCIBLE_FIELD_NUMBER = 3;
    private boolean forcible_;
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     *
     * @return whether the {@code forcible} field has been set
     */
    public boolean hasForcible() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     *
     * @return the field value; {@code false} when unset
     */
    public boolean getForcible() {
      return forcible_;
    }
14857 
    // optional uint64 master_system_time = 4;
    public static final int MASTER_SYSTEM_TIME_FIELD_NUMBER = 4;
    private long masterSystemTime_;
    /**
     * <code>optional uint64 master_system_time = 4;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     *
     * @return whether the {@code master_system_time} field has been set
     */
    public boolean hasMasterSystemTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 master_system_time = 4;</code>
     *
     * <pre>
     * wall clock time from master
     * </pre>
     *
     * @return the field value; 0 when unset
     */
    public long getMasterSystemTime() {
      return masterSystemTime_;
    }
14881 
    // Resets all fields to their proto default values.
    private void initFields() {
      regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      forcible_ = false;
      masterSystemTime_ = 0L;
    }
    // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A MergeRegionsRequest is initialized only when both required region
    // specifiers are present and themselves initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegionA()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRegionB()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegionA().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegionB().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
14912 
    // Serializes this message in field-number order, writing only fields
    // whose presence bit is set, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the memoized size is computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, regionA_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, regionB_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, forcible_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, masterSystemTime_);
      }
      getUnknownFields().writeTo(output);
    }
14930 
    // Memoized wire size in bytes; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Computes (and caches) the serialized size of this message, counting
    // only fields whose presence bit is set plus any unknown fields.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, regionA_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, regionB_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, forcible_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, masterSystemTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
14957 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the GeneratedMessage superclass
    // implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
14964 
    // Value equality: two messages are equal when field presence, field
    // values, and unknown fields all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest)) {
        // Not our type: fall back to the superclass definition of equality.
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest) obj;

      boolean result = true;
      result = result && (hasRegionA() == other.hasRegionA());
      if (hasRegionA()) {
        result = result && getRegionA()
            .equals(other.getRegionA());
      }
      result = result && (hasRegionB() == other.hasRegionB());
      if (hasRegionB()) {
        result = result && getRegionB()
            .equals(other.getRegionB());
      }
      result = result && (hasForcible() == other.hasForcible());
      if (hasForcible()) {
        result = result && (getForcible()
            == other.getForcible());
      }
      result = result && (hasMasterSystemTime() == other.hasMasterSystemTime());
      if (hasMasterSystemTime()) {
        result = result && (getMasterSystemTime()
            == other.getMasterSystemTime());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
15000 
    // Memoized hash; 0 means not yet computed (recomputed harmlessly if the
    // true hash happens to be 0).
    private int memoizedHashCode = 0;
    // Hash code consistent with equals(): mixes the descriptor, each set
    // field (tagged by its field number), and the unknown fields.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegionA()) {
        hash = (37 * hash) + REGION_A_FIELD_NUMBER;
        hash = (53 * hash) + getRegionA().hashCode();
      }
      if (hasRegionB()) {
        hash = (37 * hash) + REGION_B_FIELD_NUMBER;
        hash = (53 * hash) + getRegionB().hashCode();
      }
      if (hasForcible()) {
        hash = (37 * hash) + FORCIBLE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getForcible());
      }
      if (hasMasterSystemTime()) {
        hash = (37 * hash) + MASTER_SYSTEM_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getMasterSystemTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
15029 
    // Static parse helpers: each overload delegates to PARSER to decode a
    // MergeRegionsRequest from the given source (ByteString, byte[],
    // InputStream, or CodedInputStream), optionally with an extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
15082 
    /** Returns a new, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with the set fields of {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent for nested-builder change
    // notification (invoked internally by GeneratedMessage).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
15096     /**
15097      * Protobuf type {@code hbase.pb.MergeRegionsRequest}
15098      *
15099      * <pre>
15100      **
15101      * Merges the specified regions.
15102      * &lt;p&gt;
15103      * This method currently closes the regions and then merges them
15104      * </pre>
15105      */
15106     public static final class Builder extends
15107         com.google.protobuf.GeneratedMessage.Builder<Builder>
15108        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_descriptor;
      }

      // Maps descriptor fields to the generated accessors via reflection.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested message-field builders when the runtime
      // routes all field access through builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionAFieldBuilder();
          getRegionBFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
15140 
      // Resets every field to its declared default and clears all has-bits.
      public Builder clear() {
        super.clear();
        if (regionABuilder_ == null) {
          regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionABuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (regionBBuilder_ == null) {
          regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        forcible_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        masterSystemTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
15161 
      // Deep copy via round-trip through an (unchecked) partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDefaultInstance();
      }

      // Builds the message, throwing if a required field is unset or a
      // nested message is itself incompletely initialized.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
15182 
      // Builds without checking required fields: copies each field value and
      // translates the builder's has-bits into the message's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // Message fields come from the nested builder when one was created,
        // otherwise from the plain field.
        if (regionABuilder_ == null) {
          result.regionA_ = regionA_;
        } else {
          result.regionA_ = regionABuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (regionBBuilder_ == null) {
          result.regionB_ = regionB_;
        } else {
          result.regionB_ = regionBBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.forcible_ = forcible_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.masterSystemTime_ = masterSystemTime_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
15215 
      // Type-dispatching merge: uses the fast typed path for our own message
      // type, falls back to reflective merge for anything else.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields set on `other` overwrite or merge
      // into this builder; unknown fields are merged as well.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDefaultInstance()) return this;
        if (other.hasRegionA()) {
          mergeRegionA(other.getRegionA());
        }
        if (other.hasRegionB()) {
          mergeRegionB(other.getRegionB());
        }
        if (other.hasForcible()) {
          setForcible(other.getForcible());
        }
        if (other.hasMasterSystemTime()) {
          setMasterSystemTime(other.getMasterSystemTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
15242 
      // True only when both required region specifiers are present and are
      // themselves fully initialized.
      public final boolean isInitialized() {
        if (!hasRegionA()) {
          // required field region_a is missing
          return false;
        }
        if (!hasRegionB()) {
          // required field region_b is missing
          return false;
        }
        if (!getRegionA().isInitialized()) {
          // region_a has uninitialized required sub-fields
          return false;
        }
        if (!getRegionB().isInitialized()) {
          // region_b has uninitialized required sub-fields
          return false;
        }
        return true;
      }
15262 
      // Parses from the wire and merges the result into this builder.  On a
      // parse error the partially-read message (if any) is still merged in
      // the finally block before the exception propagates, so callers can
      // inspect what was read.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for this builder: 0x1 = region_a, 0x2 = region_b,
      // 0x4 = forcible, 0x8 = master_system_time.
      private int bitField0_;

      // required .hbase.pb.RegionSpecifier region_a = 1;
      // While regionABuilder_ is null, regionA_ holds the current value;
      // once the nested builder exists it becomes authoritative and
      // regionA_ is nulled out (see getRegionAFieldBuilder()).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public boolean hasRegionA() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() {
        if (regionABuilder_ == null) {
          return regionA_;
        } else {
          return regionABuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public Builder setRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionABuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          regionA_ = value;
          onChanged();
        } else {
          regionABuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public Builder setRegionA(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionABuilder_ == null) {
          regionA_ = builderForValue.build();
          onChanged();
        } else {
          regionABuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       * Merges {@code value} into any existing region_a (protobuf
       * message-merge semantics); becomes a plain set when the field is
       * unset or still the default instance.
       */
      public Builder mergeRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionABuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              regionA_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            regionA_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionA_).mergeFrom(value).buildPartial();
          } else {
            regionA_ = value;
          }
          onChanged();
        } else {
          regionABuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public Builder clearRegionA() {
        if (regionABuilder_ == null) {
          regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionABuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionABuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionAFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() {
        if (regionABuilder_ != null) {
          return regionABuilder_.getMessageOrBuilder();
        } else {
          return regionA_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_a = 1;</code>
       * Lazily creates the nested field builder and hands ownership of the
       * current value to it (regionA_ is nulled afterwards).
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionAFieldBuilder() {
        if (regionABuilder_ == null) {
          regionABuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  regionA_,
                  getParentForChildren(),
                  isClean());
          regionA_ = null;
        }
        return regionABuilder_;
      }
15398 
      // required .hbase.pb.RegionSpecifier region_b = 2;
      // Accessor set is the exact mirror of region_a, using has-bit 0x2.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public boolean hasRegionB() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() {
        if (regionBBuilder_ == null) {
          return regionB_;
        } else {
          return regionBBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public Builder setRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          regionB_ = value;
          onChanged();
        } else {
          regionBBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public Builder setRegionB(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBBuilder_ == null) {
          regionB_ = builderForValue.build();
          onChanged();
        } else {
          regionBBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       * Protobuf message-merge semantics, as in mergeRegionA.
       */
      public Builder mergeRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              regionB_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            regionB_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionB_).mergeFrom(value).buildPartial();
          } else {
            regionB_ = value;
          }
          onChanged();
        } else {
          regionBBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public Builder clearRegionB() {
        if (regionBBuilder_ == null) {
          regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getRegionBFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() {
        if (regionBBuilder_ != null) {
          return regionBBuilder_.getMessageOrBuilder();
        } else {
          return regionB_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region_b = 2;</code>
       * Lazily creates the nested field builder; regionB_ is nulled once the
       * builder takes ownership of the value.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionBFieldBuilder() {
        if (regionBBuilder_ == null) {
          regionBBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  regionB_,
                  getParentForChildren(),
                  isClean());
          regionB_ = null;
        }
        return regionBBuilder_;
      }
15515 
      // optional bool forcible = 3 [default = false]; has-bit 0x4.
      private boolean forcible_ ;
      /**
       * <code>optional bool forcible = 3 [default = false];</code>
       */
      public boolean hasForcible() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool forcible = 3 [default = false];</code>
       */
      public boolean getForcible() {
        return forcible_;
      }
      /**
       * <code>optional bool forcible = 3 [default = false];</code>
       */
      public Builder setForcible(boolean value) {
        bitField0_ |= 0x00000004;
        forcible_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool forcible = 3 [default = false];</code>
       * Clears the has-bit and restores the declared default (false).
       */
      public Builder clearForcible() {
        bitField0_ = (bitField0_ & ~0x00000004);
        forcible_ = false;
        onChanged();
        return this;
      }
15548 
      // optional uint64 master_system_time = 4; has-bit 0x8.
      private long masterSystemTime_ ;
      /**
       * <code>optional uint64 master_system_time = 4;</code>
       *
       * <pre>
       * wall clock time from master
       * </pre>
       */
      public boolean hasMasterSystemTime() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 master_system_time = 4;</code>
       *
       * <pre>
       * wall clock time from master
       * </pre>
       */
      public long getMasterSystemTime() {
        return masterSystemTime_;
      }
      /**
       * <code>optional uint64 master_system_time = 4;</code>
       *
       * <pre>
       * wall clock time from master
       * </pre>
       */
      public Builder setMasterSystemTime(long value) {
        bitField0_ |= 0x00000008;
        masterSystemTime_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 master_system_time = 4;</code>
       *
       * <pre>
       * wall clock time from master
       * </pre>
       */
      public Builder clearMasterSystemTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        masterSystemTime_ = 0L;
        onChanged();
        return this;
      }
15597 
15598       // @@protoc_insertion_point(builder_scope:hbase.pb.MergeRegionsRequest)
15599     }
15600 
    // Eagerly creates the singleton default instance.  The noInit(true)
    // constructor skips descriptor-driven setup; initFields() then applies
    // the declared field defaults.
    static {
      defaultInstance = new MergeRegionsRequest(true);
      defaultInstance.initFields();
    }
15605 
15606     // @@protoc_insertion_point(class_scope:hbase.pb.MergeRegionsRequest)
15607   }
15608 
  // MergeRegionsResponse declares no fields, so its OrBuilder contract is
  // just the generic MessageOrBuilder surface.
  public interface MergeRegionsResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
15612   /**
15613    * Protobuf type {@code hbase.pb.MergeRegionsResponse}
15614    */
15615   public static final class MergeRegionsResponse extends
15616       com.google.protobuf.GeneratedMessage
15617       implements MergeRegionsResponseOrBuilder {
    // Use MergeRegionsResponse.newBuilder() to construct.
    private MergeRegionsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the shared default instance.
    private MergeRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Immutable singleton default, assigned in the class's static initializer.
    private static final MergeRegionsResponse defaultInstance;
    public static MergeRegionsResponse getDefaultInstance() {
      return defaultInstance;
    }

    public MergeRegionsResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor.  This message declares no fields, so
    // every non-zero tag is preserved in the unknown-field set; tag 0 marks
    // end of input.
    private MergeRegionsResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-built message so callers can recover it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize whatever was read before the constructor exits.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_descriptor;
    }

    // Maps descriptor fields to the generated accessors via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class);
    }

    // NOTE(review): PARSER is a public, non-final static — standard protoc
    // 2.5 output, but effectively a constant; it must never be reassigned.
    public static com.google.protobuf.Parser<MergeRegionsResponse> PARSER =
        new com.google.protobuf.AbstractParser<MergeRegionsResponse>() {
      public MergeRegionsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MergeRegionsResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MergeRegionsResponse> getParserForType() {
      return PARSER;
    }
15700 
    // No declared fields to initialize for this empty message.
    private void initFields() {
    }
    // Memoized tri-state: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so this message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      // Only the unknown-field set can carry data for this message.
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
15735 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) obj;

      // No declared fields: equality reduces to unknown-field equality.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash (0 = not yet computed); mixes descriptor identity with
    // the unknown-field set, mirroring equals().
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
15764 
    // Convenience parse entry points; all delegate to the static PARSER.
    // ExtensionRegistryLite overloads resolve protobuf extensions while
    // parsing; the delimited variant expects a varint length prefix.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
15800     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseDelimitedFrom(
15801         java.io.InputStream input,
15802         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15803         throws java.io.IOException {
15804       return PARSER.parseDelimitedFrom(input, extensionRegistry);
15805     }
15806     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(
15807         com.google.protobuf.CodedInputStream input)
15808         throws java.io.IOException {
15809       return PARSER.parseFrom(input);
15810     }
15811     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(
15812         com.google.protobuf.CodedInputStream input,
15813         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15814         throws java.io.IOException {
15815       return PARSER.parseFrom(input, extensionRegistry);
15816     }
15817 
    // Builder factories: fresh builder, builder pre-populated from a prototype,
    // and toBuilder() for round-tripping this instance back into a mutable form.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: create a builder parented into an enclosing builder tree.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
15831     /**
15832      * Protobuf type {@code hbase.pb.MergeRegionsResponse}
15833      */
15834     public static final class Builder extends
15835         com.google.protobuf.GeneratedMessage.Builder<Builder>
15836        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponseOrBuilder {
15837       public static final com.google.protobuf.Descriptors.Descriptor
15838           getDescriptor() {
15839         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_descriptor;
15840       }
15841 
15842       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15843           internalGetFieldAccessorTable() {
15844         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable
15845             .ensureFieldAccessorsInitialized(
15846                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class);
15847       }
15848 
15849       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.newBuilder()
15850       private Builder() {
15851         maybeForceBuilderInitialization();
15852       }
15853 
15854       private Builder(
15855           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15856         super(parent);
15857         maybeForceBuilderInitialization();
15858       }
15859       private void maybeForceBuilderInitialization() {
15860         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15861         }
15862       }
15863       private static Builder create() {
15864         return new Builder();
15865       }
15866 
15867       public Builder clear() {
15868         super.clear();
15869         return this;
15870       }
15871 
15872       public Builder clone() {
15873         return create().mergeFrom(buildPartial());
15874       }
15875 
15876       public com.google.protobuf.Descriptors.Descriptor
15877           getDescriptorForType() {
15878         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_descriptor;
15879       }
15880 
15881       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse getDefaultInstanceForType() {
15882         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance();
15883       }
15884 
15885       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse build() {
15886         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse result = buildPartial();
15887         if (!result.isInitialized()) {
15888           throw newUninitializedMessageException(result);
15889         }
15890         return result;
15891       }
15892 
15893       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse buildPartial() {
15894         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse(this);
15895         onBuilt();
15896         return result;
15897       }
15898 
15899       public Builder mergeFrom(com.google.protobuf.Message other) {
15900         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) {
15901           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse)other);
15902         } else {
15903           super.mergeFrom(other);
15904           return this;
15905         }
15906       }
15907 
15908       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse other) {
15909         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance()) return this;
15910         this.mergeUnknownFields(other.getUnknownFields());
15911         return this;
15912       }
15913 
15914       public final boolean isInitialized() {
15915         return true;
15916       }
15917 
15918       public Builder mergeFrom(
15919           com.google.protobuf.CodedInputStream input,
15920           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15921           throws java.io.IOException {
15922         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parsedMessage = null;
15923         try {
15924           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15925         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15926           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) e.getUnfinishedMessage();
15927           throw e;
15928         } finally {
15929           if (parsedMessage != null) {
15930             mergeFrom(parsedMessage);
15931           }
15932         }
15933         return this;
15934       }
15935 
15936       // @@protoc_insertion_point(builder_scope:hbase.pb.MergeRegionsResponse)
15937     }
15938 
    // Eagerly create the shared default (empty) instance at class-load time.
    static {
      defaultInstance = new MergeRegionsResponse(true);
      defaultInstance.initFields();
    }
15943 
15944     // @@protoc_insertion_point(class_scope:hbase.pb.MergeRegionsResponse)
15945   }
15946 
  /**
   * Read-only accessor contract shared by {@code WALEntry} and its Builder:
   * a WAL key, optional inlined KV bytes, and an optional count of Cells
   * carried out-of-band in a cellblock.
   */
  public interface WALEntryOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.WALKey key = 1;
    /**
     * <code>required .hbase.pb.WALKey key = 1;</code>
     */
    boolean hasKey();
    /**
     * <code>required .hbase.pb.WALKey key = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey();
    /**
     * <code>required .hbase.pb.WALKey key = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder();

    // repeated bytes key_value_bytes = 2;
    /**
     * <code>repeated bytes key_value_bytes = 2;</code>
     *
     * <pre>
     * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
     * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
     * and associated_cell_count has count of Cells associated w/ this WALEntry
     * </pre>
     */
    java.util.List<com.google.protobuf.ByteString> getKeyValueBytesList();
    /**
     * <code>repeated bytes key_value_bytes = 2;</code>
     *
     * <pre>
     * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
     * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
     * and associated_cell_count has count of Cells associated w/ this WALEntry
     * </pre>
     */
    int getKeyValueBytesCount();
    /**
     * <code>repeated bytes key_value_bytes = 2;</code>
     *
     * <pre>
     * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
     * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
     * and associated_cell_count has count of Cells associated w/ this WALEntry
     * </pre>
     */
    com.google.protobuf.ByteString getKeyValueBytes(int index);

    // optional int32 associated_cell_count = 3;
    /**
     * <code>optional int32 associated_cell_count = 3;</code>
     *
     * <pre>
     * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
     * </pre>
     */
    boolean hasAssociatedCellCount();
    /**
     * <code>optional int32 associated_cell_count = 3;</code>
     *
     * <pre>
     * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
     * </pre>
     */
    int getAssociatedCellCount();
  }
16014   /**
16015    * Protobuf type {@code hbase.pb.WALEntry}
16016    *
16017    * <pre>
16018    * Protocol buffer version of WAL for replication
16019    * </pre>
16020    */
16021   public static final class WALEntry extends
16022       com.google.protobuf.GeneratedMessage
16023       implements WALEntryOrBuilder {
    // Use WALEntry.newBuilder() to construct.
    private WALEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor builds the shared defaultInstance with an empty unknown-field set.
    private WALEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final WALEntry defaultInstance;
    public static WALEntry getDefaultInstance() {
      return defaultInstance;
    }

    public WALEntry getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not declared in the .proto schema, preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tagged fields off the stream until
    // tag 0 (end of message / group). Tags 10, 18, 24 correspond to fields
    // key=1 (message), key_value_bytes=2 (bytes), associated_cell_count=3 (int32).
    // Note: the `default:` arm precedes the field cases — legal in a Java switch;
    // it only runs for tags not matched by any case label.
    private WALEntry(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unrecognized tag: stash in unknownFields; a false return means
              // end-of-group, which also terminates the loop.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder subBuilder = null;
              // If key was already seen, merge the new occurrence into it
              // (last-occurrence-wins merge semantics for message fields).
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = key_.toBuilder();
              }
              key_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(key_);
                key_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Lazily allocate the repeated-bytes list on first element;
              // mutable_bitField0_ tracks that allocation for the finally block.
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                keyValueBytes_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000002;
              }
              keyValueBytes_.add(input.readBytes());
              break;
            }
            case 24: {
              bitField0_ |= 0x00000002;
              associatedCellCount_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and attach the collected unknown fields,
        // even if parsing failed partway (the partial message may be surfaced).
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_descriptor;
    }

    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class);
    }

    // Shared stateless parser; delegates to the parsing constructor above.
    // (Non-final public static field is the protobuf 2.5 generated pattern.)
    public static com.google.protobuf.Parser<WALEntry> PARSER =
        new com.google.protobuf.AbstractParser<WALEntry>() {
      public WALEntry parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WALEntry(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WALEntry> getParserForType() {
      return PARSER;
    }
16136 
    // Presence bitmask: bit 0x1 = key, bit 0x2 = associated_cell_count.
    // (The repeated key_value_bytes field has no presence bit.)
    private int bitField0_;
    // required .hbase.pb.WALKey key = 1;
    public static final int KEY_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey key_;
    /**
     * <code>required .hbase.pb.WALKey key = 1;</code>
     */
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.WALKey key = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey() {
      return key_;
    }
    /**
     * <code>required .hbase.pb.WALKey key = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder() {
      return key_;
    }

    // repeated bytes key_value_bytes = 2;
    public static final int KEY_VALUE_BYTES_FIELD_NUMBER = 2;
    // Immutable after construction (emptyList or unmodifiableList).
    private java.util.List<com.google.protobuf.ByteString> keyValueBytes_;
    /**
     * <code>repeated bytes key_value_bytes = 2;</code>
     *
     * <pre>
     * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
     * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
     * and associated_cell_count has count of Cells associated w/ this WALEntry
     * </pre>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getKeyValueBytesList() {
      return keyValueBytes_;
    }
    /**
     * <code>repeated bytes key_value_bytes = 2;</code>
     *
     * <pre>
     * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
     * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
     * and associated_cell_count has count of Cells associated w/ this WALEntry
     * </pre>
     */
    public int getKeyValueBytesCount() {
      return keyValueBytes_.size();
    }
    /**
     * <code>repeated bytes key_value_bytes = 2;</code>
     *
     * <pre>
     * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
     * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
     * and associated_cell_count has count of Cells associated w/ this WALEntry
     * </pre>
     */
    public com.google.protobuf.ByteString getKeyValueBytes(int index) {
      return keyValueBytes_.get(index);
    }

    // optional int32 associated_cell_count = 3;
    public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 3;
    private int associatedCellCount_;
    /**
     * <code>optional int32 associated_cell_count = 3;</code>
     *
     * <pre>
     * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
     * </pre>
     */
    public boolean hasAssociatedCellCount() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int32 associated_cell_count = 3;</code>
     *
     * <pre>
     * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
     * </pre>
     */
    public int getAssociatedCellCount() {
      return associatedCellCount_;
    }

    // Reset all fields to their proto defaults (called before parsing).
    private void initFields() {
      key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
      keyValueBytes_ = java.util.Collections.emptyList();
      associatedCellCount_ = 0;
    }
    // Memoized initialization check: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    // Initialized iff the required `key` field is set and itself initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getKey().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Serialize set fields in field-number order, then any unknown fields.
    // getSerializedSize() is called first to populate memoized sizes used
    // when writing nested messages.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, key_);
      }
      for (int i = 0; i < keyValueBytes_.size(); i++) {
        output.writeBytes(2, keyValueBytes_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(3, associatedCellCount_);
      }
      getUnknownFields().writeTo(output);
    }
16261 
    private int memoizedSerializedSize = -1;
    // Total serialized size in bytes; memoized (-1 = not yet computed).
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, key_);
      }
      {
        // Repeated bytes: sum of length-delimited payloads plus one 1-byte tag
        // per element (field 2's tag fits in a single byte).
        int dataSize = 0;
        for (int i = 0; i < keyValueBytes_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(keyValueBytes_.get(i));
        }
        size += dataSize;
        size += 1 * getKeyValueBytesList().size();
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, associatedCellCount_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage's serialization proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
16296 
    // Field-by-field equality: presence flags must match, set fields must be
    // equal, and unknown-field sets must be equal.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj;

      boolean result = true;
      result = result && (hasKey() == other.hasKey());
      if (hasKey()) {
        result = result && getKey()
            .equals(other.getKey());
      }
      result = result && getKeyValueBytesList()
          .equals(other.getKeyValueBytesList());
      result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
      if (hasAssociatedCellCount()) {
        result = result && (getAssociatedCellCount()
            == other.getAssociatedCellCount());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    // Hash consistent with equals(): mixes the descriptor, each set field
    // (tagged with its field number), and the unknown fields. Memoized;
    // 0 is the "not yet computed" sentinel.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (getKeyValueBytesCount() > 0) {
        hash = (37 * hash) + KEY_VALUE_BYTES_FIELD_NUMBER;
        hash = (53 * hash) + getKeyValueBytesList().hashCode();
      }
      if (hasAssociatedCellCount()) {
        hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
        hash = (53 * hash) + getAssociatedCellCount();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
16349 
    // Static parse entry points for every standard input form (ByteString, byte[],
    // InputStream, CodedInputStream; with/without an extension registry; delimited
    // or not). All delegate to the shared PARSER instance.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
16402 
    // Builder factories: fresh builder, builder pre-populated from a prototype,
    // and toBuilder() for round-tripping this instance back into a mutable form.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: create a builder parented into an enclosing builder tree.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
16416     /**
16417      * Protobuf type {@code hbase.pb.WALEntry}
16418      *
16419      * <pre>
16420      * Protocol buffer version of WAL for replication
16421      * </pre>
16422      */
16423     public static final class Builder extends
16424         com.google.protobuf.GeneratedMessage.Builder<Builder>
16425        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_descriptor;
      }

      // Reflection support: maps descriptor fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly create the nested-message field builder for `key` when the
      // runtime is configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getKeyFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
16456 
      // Resets all three WALEntry fields (key, key_value_bytes, associated_cell_count) and
      // their has-bits. clone() is implemented as create().mergeFrom(buildPartial()), the
      // standard protobuf-2.5 generated deep-copy idiom.
16457       public Builder clear() {
16458         super.clear();
16459         if (keyBuilder_ == null) {
16460           key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
16461         } else {
16462           keyBuilder_.clear();
16463         }
16464         bitField0_ = (bitField0_ & ~0x00000001);
16465         keyValueBytes_ = java.util.Collections.emptyList();
16466         bitField0_ = (bitField0_ & ~0x00000002);
16467         associatedCellCount_ = 0;
16468         bitField0_ = (bitField0_ & ~0x00000004);
16469         return this;
16470       }
16471 
16472       public Builder clone() {
16473         return create().mergeFrom(buildPartial());
16474       }
16475 
      // build() delegates to buildPartial() and then enforces required-field semantics:
      // an uninitialized WALEntry (missing required `key`) raises via
      // newUninitializedMessageException rather than being returned.
16476       public com.google.protobuf.Descriptors.Descriptor
16477           getDescriptorForType() {
16478         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_descriptor;
16479       }
16480 
16481       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() {
16482         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance();
16483       }
16484 
16485       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() {
16486         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial();
16487         if (!result.isInitialized()) {
16488           throw newUninitializedMessageException(result);
16489         }
16490         return result;
16491       }
16492 
      // Copies builder state into a new WALEntry without initialization checks.
      // Note the generated has-bit remap: builder bit 0x1 (key) -> message bit 0x1, but
      // builder bit 0x4 (associated_cell_count) -> message bit 0x2, because the repeated
      // key_value_bytes field has no has-bit on the message side. The mutable list is
      // frozen (unmodifiableList) and its builder bit cleared so later builder mutations
      // copy-on-write instead of aliasing the built message.
16493       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() {
16494         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this);
16495         int from_bitField0_ = bitField0_;
16496         int to_bitField0_ = 0;
16497         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
16498           to_bitField0_ |= 0x00000001;
16499         }
16500         if (keyBuilder_ == null) {
16501           result.key_ = key_;
16502         } else {
16503           result.key_ = keyBuilder_.build();
16504         }
16505         if (((bitField0_ & 0x00000002) == 0x00000002)) {
16506           keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_);
16507           bitField0_ = (bitField0_ & ~0x00000002);
16508         }
16509         result.keyValueBytes_ = keyValueBytes_;
16510         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
16511           to_bitField0_ |= 0x00000002;
16512         }
16513         result.associatedCellCount_ = associatedCellCount_;
16514         result.bitField0_ = to_bitField0_;
16515         onBuilt();
16516         return result;
16517       }
16518 
      // Standard generated merge: the typed overload merges field-by-field; the untyped
      // overload downcasts when possible, else falls back to reflective merge.
      // For the repeated bytes list, merging into an empty builder aliases the other
      // message's (immutable) list and clears the mutability bit, deferring the copy
      // until a mutation actually occurs.
16519       public Builder mergeFrom(com.google.protobuf.Message other) {
16520         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) {
16521           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other);
16522         } else {
16523           super.mergeFrom(other);
16524           return this;
16525         }
16526       }
16527 
16528       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) {
16529         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this;
16530         if (other.hasKey()) {
16531           mergeKey(other.getKey());
16532         }
16533         if (!other.keyValueBytes_.isEmpty()) {
16534           if (keyValueBytes_.isEmpty()) {
16535             keyValueBytes_ = other.keyValueBytes_;
16536             bitField0_ = (bitField0_ & ~0x00000002);
16537           } else {
16538             ensureKeyValueBytesIsMutable();
16539             keyValueBytes_.addAll(other.keyValueBytes_);
16540           }
16541           onChanged();
16542         }
16543         if (other.hasAssociatedCellCount()) {
16544           setAssociatedCellCount(other.getAssociatedCellCount());
16545         }
16546         this.mergeUnknownFields(other.getUnknownFields());
16547         return this;
16548       }
16549 
      // WALEntry is initialized iff required `key` is set and that WALKey is itself
      // initialized (transitive required-field check). The empty lines inside the
      // if-bodies are the javadoc export's rendering of this generated method.
16550       public final boolean isInitialized() {
16551         if (!hasKey()) {
16552           
16553           return false;
16554         }
16555         if (!getKey().isInitialized()) {
16556           
16557           return false;
16558         }
16559         return true;
16560       }
16561 
      // Stream merge via PARSER. On InvalidProtocolBufferException the partially parsed
      // message is recovered from the exception and still merged in the finally block
      // before rethrowing — so already-read fields are not lost.
16562       public Builder mergeFrom(
16563           com.google.protobuf.CodedInputStream input,
16564           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16565           throws java.io.IOException {
16566         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parsedMessage = null;
16567         try {
16568           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
16569         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16570           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) e.getUnfinishedMessage();
16571           throw e;
16572         } finally {
16573           if (parsedMessage != null) {
16574             mergeFrom(parsedMessage);
16575           }
16576         }
16577         return this;
16578       }
      // State + accessors for required field `key` (WALKey, field number 1), has-bit 0x1.
      // Follows the generated single-message-field pattern: plain `key_` storage until a
      // SingleFieldBuilder is demanded (getKeyBuilder), after which `keyBuilder_` owns the
      // value and `key_` is nulled out.
16579       private int bitField0_;
16580 
16581       // required .hbase.pb.WALKey key = 1;
16582       private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
16583       private com.google.protobuf.SingleFieldBuilder<
16584           org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder> keyBuilder_;
16585       /**
16586        * <code>required .hbase.pb.WALKey key = 1;</code>
16587        */
16588       public boolean hasKey() {
16589         return ((bitField0_ & 0x00000001) == 0x00000001);
16590       }
16591       /**
16592        * <code>required .hbase.pb.WALKey key = 1;</code>
16593        */
16594       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey() {
16595         if (keyBuilder_ == null) {
16596           return key_;
16597         } else {
16598           return keyBuilder_.getMessage();
16599         }
16600       }
16601       /**
16602        * <code>required .hbase.pb.WALKey key = 1;</code>
16603        */
16604       public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey value) {
16605         if (keyBuilder_ == null) {
16606           if (value == null) {
16607             throw new NullPointerException();
16608           }
16609           key_ = value;
16610           onChanged();
16611         } else {
16612           keyBuilder_.setMessage(value);
16613         }
16614         bitField0_ |= 0x00000001;
16615         return this;
16616       }
16617       /**
16618        * <code>required .hbase.pb.WALKey key = 1;</code>
16619        */
16620       public Builder setKey(
16621           org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builderForValue) {
16622         if (keyBuilder_ == null) {
16623           key_ = builderForValue.build();
16624           onChanged();
16625         } else {
16626           keyBuilder_.setMessage(builderForValue.build());
16627         }
16628         bitField0_ |= 0x00000001;
16629         return this;
16630       }
      // mergeKey: if a non-default key is already present, field-merge the two WALKeys;
      // otherwise overwrite. Reference comparison with getDefaultInstance() is the
      // generated idiom for "still at default".
16631       /**
16632        * <code>required .hbase.pb.WALKey key = 1;</code>
16633        */
16634       public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey value) {
16635         if (keyBuilder_ == null) {
16636           if (((bitField0_ & 0x00000001) == 0x00000001) &&
16637               key_ != org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) {
16638             key_ =
16639               org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder(key_).mergeFrom(value).buildPartial();
16640           } else {
16641             key_ = value;
16642           }
16643           onChanged();
16644         } else {
16645           keyBuilder_.mergeFrom(value);
16646         }
16647         bitField0_ |= 0x00000001;
16648         return this;
16649       }
16650       /**
16651        * <code>required .hbase.pb.WALKey key = 1;</code>
16652        */
16653       public Builder clearKey() {
16654         if (keyBuilder_ == null) {
16655           key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
16656           onChanged();
16657         } else {
16658           keyBuilder_.clear();
16659         }
16660         bitField0_ = (bitField0_ & ~0x00000001);
16661         return this;
16662       }
16663       /**
16664        * <code>required .hbase.pb.WALKey key = 1;</code>
16665        */
16666       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getKeyBuilder() {
16667         bitField0_ |= 0x00000001;
16668         onChanged();
16669         return getKeyFieldBuilder().getBuilder();
16670       }
16671       /**
16672        * <code>required .hbase.pb.WALKey key = 1;</code>
16673        */
16674       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder() {
16675         if (keyBuilder_ != null) {
16676           return keyBuilder_.getMessageOrBuilder();
16677         } else {
16678           return key_;
16679         }
16680       }
      // Lazily constructs the SingleFieldBuilder; after this point `key_` is owned by the
      // builder (hence set to null here) and all reads/writes go through keyBuilder_.
16681       /**
16682        * <code>required .hbase.pb.WALKey key = 1;</code>
16683        */
16684       private com.google.protobuf.SingleFieldBuilder<
16685           org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder> 
16686           getKeyFieldBuilder() {
16687         if (keyBuilder_ == null) {
16688           keyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
16689               org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder>(
16690                   key_,
16691                   getParentForChildren(),
16692                   isClean());
16693           key_ = null;
16694         }
16695         return keyBuilder_;
16696       }
16697 
      // State + accessors for repeated bytes `key_value_bytes` (field number 2).
      // Mutability bit 0x2 tracks whether keyValueBytes_ is a private ArrayList (mutable)
      // or a shared/immutable list; ensureKeyValueBytesIsMutable() performs the
      // copy-on-write before any mutation.
16698       // repeated bytes key_value_bytes = 2;
16699       private java.util.List<com.google.protobuf.ByteString> keyValueBytes_ = java.util.Collections.emptyList();
16700       private void ensureKeyValueBytesIsMutable() {
16701         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
16702           keyValueBytes_ = new java.util.ArrayList<com.google.protobuf.ByteString>(keyValueBytes_);
16703           bitField0_ |= 0x00000002;
16704          }
16705       }
16706       /**
16707        * <code>repeated bytes key_value_bytes = 2;</code>
16708        *
16709        * <pre>
16710        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16711        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16712        * and associated_cell_count has count of Cells associated w/ this WALEntry
16713        * </pre>
16714        */
16715       public java.util.List<com.google.protobuf.ByteString>
16716           getKeyValueBytesList() {
16717         return java.util.Collections.unmodifiableList(keyValueBytes_);
16718       }
16719       /**
16720        * <code>repeated bytes key_value_bytes = 2;</code>
16721        *
16722        * <pre>
16723        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16724        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16725        * and associated_cell_count has count of Cells associated w/ this WALEntry
16726        * </pre>
16727        */
16728       public int getKeyValueBytesCount() {
16729         return keyValueBytes_.size();
16730       }
16731       /**
16732        * <code>repeated bytes key_value_bytes = 2;</code>
16733        *
16734        * <pre>
16735        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16736        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16737        * and associated_cell_count has count of Cells associated w/ this WALEntry
16738        * </pre>
16739        */
16740       public com.google.protobuf.ByteString getKeyValueBytes(int index) {
16741         return keyValueBytes_.get(index);
16742       }
      // The mis-indented null checks below are exactly what protoc 2.5 emits for
      // repeated-bytes setters; left byte-identical on purpose.
16743       /**
16744        * <code>repeated bytes key_value_bytes = 2;</code>
16745        *
16746        * <pre>
16747        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16748        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16749        * and associated_cell_count has count of Cells associated w/ this WALEntry
16750        * </pre>
16751        */
16752       public Builder setKeyValueBytes(
16753           int index, com.google.protobuf.ByteString value) {
16754         if (value == null) {
16755     throw new NullPointerException();
16756   }
16757   ensureKeyValueBytesIsMutable();
16758         keyValueBytes_.set(index, value);
16759         onChanged();
16760         return this;
16761       }
16762       /**
16763        * <code>repeated bytes key_value_bytes = 2;</code>
16764        *
16765        * <pre>
16766        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16767        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16768        * and associated_cell_count has count of Cells associated w/ this WALEntry
16769        * </pre>
16770        */
16771       public Builder addKeyValueBytes(com.google.protobuf.ByteString value) {
16772         if (value == null) {
16773     throw new NullPointerException();
16774   }
16775   ensureKeyValueBytesIsMutable();
16776         keyValueBytes_.add(value);
16777         onChanged();
16778         return this;
16779       }
16780       /**
16781        * <code>repeated bytes key_value_bytes = 2;</code>
16782        *
16783        * <pre>
16784        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16785        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16786        * and associated_cell_count has count of Cells associated w/ this WALEntry
16787        * </pre>
16788        */
16789       public Builder addAllKeyValueBytes(
16790           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
16791         ensureKeyValueBytesIsMutable();
16792         super.addAll(values, keyValueBytes_);
16793         onChanged();
16794         return this;
16795       }
16796       /**
16797        * <code>repeated bytes key_value_bytes = 2;</code>
16798        *
16799        * <pre>
16800        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
16801        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
16802        * and associated_cell_count has count of Cells associated w/ this WALEntry
16803        * </pre>
16804        */
16805       public Builder clearKeyValueBytes() {
16806         keyValueBytes_ = java.util.Collections.emptyList();
16807         bitField0_ = (bitField0_ & ~0x00000002);
16808         onChanged();
16809         return this;
16810       }
16811 
      // State + accessors for optional int32 `associated_cell_count` (field number 3),
      // builder has-bit 0x4. Used when Cells travel in a side cellblock instead of
      // key_value_bytes (per the field's own javadoc below).
16812       // optional int32 associated_cell_count = 3;
16813       private int associatedCellCount_ ;
16814       /**
16815        * <code>optional int32 associated_cell_count = 3;</code>
16816        *
16817        * <pre>
16818        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
16819        * </pre>
16820        */
16821       public boolean hasAssociatedCellCount() {
16822         return ((bitField0_ & 0x00000004) == 0x00000004);
16823       }
16824       /**
16825        * <code>optional int32 associated_cell_count = 3;</code>
16826        *
16827        * <pre>
16828        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
16829        * </pre>
16830        */
16831       public int getAssociatedCellCount() {
16832         return associatedCellCount_;
16833       }
16834       /**
16835        * <code>optional int32 associated_cell_count = 3;</code>
16836        *
16837        * <pre>
16838        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
16839        * </pre>
16840        */
16841       public Builder setAssociatedCellCount(int value) {
16842         bitField0_ |= 0x00000004;
16843         associatedCellCount_ = value;
16844         onChanged();
16845         return this;
16846       }
16847       /**
16848        * <code>optional int32 associated_cell_count = 3;</code>
16849        *
16850        * <pre>
16851        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
16852        * </pre>
16853        */
16854       public Builder clearAssociatedCellCount() {
16855         bitField0_ = (bitField0_ & ~0x00000004);
16856         associatedCellCount_ = 0;
16857         onChanged();
16858         return this;
16859       }
16860 
16861       // @@protoc_insertion_point(builder_scope:hbase.pb.WALEntry)
16862     }
16863 
    // Class initializer: builds the shared WALEntry default instance with the no-init
    // constructor, then populates its fields with proto defaults.
16864     static {
16865       defaultInstance = new WALEntry(true);
16866       defaultInstance.initFields();
16867     }
16868 
16869     // @@protoc_insertion_point(class_scope:hbase.pb.WALEntry)
16870   }
16871 
  // Read-only view interface for ReplicateWALEntryRequest: standard generated accessor
  // quartet (list / indexed get / count / OrBuilder views) for repeated field `entry`.
16872   public interface ReplicateWALEntryRequestOrBuilder
16873       extends com.google.protobuf.MessageOrBuilder {
16874 
16875     // repeated .hbase.pb.WALEntry entry = 1;
16876     /**
16877      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
16878      */
16879     java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> 
16880         getEntryList();
16881     /**
16882      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
16883      */
16884     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index);
16885     /**
16886      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
16887      */
16888     int getEntryCount();
16889     /**
16890      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
16891      */
16892     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> 
16893         getEntryOrBuilderList();
16894     /**
16895      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
16896      */
16897     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
16898         int index);
16899   }
16900   /**
16901    * Protobuf type {@code hbase.pb.ReplicateWALEntryRequest}
16902    *
16903    * <pre>
16904    **
16905    * Replicates the given entries. The guarantee is that the given entries
16906    * will be durable on the slave cluster if this method returns without
16907    * any exception.  hbase.replication has to be set to true for this to work.
16908    * </pre>
16909    */
16910   public static final class ReplicateWALEntryRequest extends
16911       com.google.protobuf.GeneratedMessage
16912       implements ReplicateWALEntryRequestOrBuilder {
    // Constructors and singleton plumbing: the (builder) ctor copies unknown fields from
    // the builder; the (boolean) ctor is the no-init path used only for defaultInstance.
16913     // Use ReplicateWALEntryRequest.newBuilder() to construct.
16914     private ReplicateWALEntryRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
16915       super(builder);
16916       this.unknownFields = builder.getUnknownFields();
16917     }
16918     private ReplicateWALEntryRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16919 
16920     private static final ReplicateWALEntryRequest defaultInstance;
16921     public static ReplicateWALEntryRequest getDefaultInstance() {
16922       return defaultInstance;
16923     }
16924 
16925     public ReplicateWALEntryRequest getDefaultInstanceForType() {
16926       return defaultInstance;
16927     }
16928 
16929     private final com.google.protobuf.UnknownFieldSet unknownFields;
16930     @java.lang.Override
16931     public final com.google.protobuf.UnknownFieldSet
16932         getUnknownFields() {
16933       return this.unknownFields;
16934     }
    // Wire-format parsing constructor. Reads tags until EOF (tag 0): tag 10
    // (field 1, length-delimited) appends a WALEntry; anything else goes through
    // parseUnknownField. The `default:` arm preceding `case 10:` is legal Java and is
    // how protoc orders generated switches. The finally block freezes the entry list
    // and commits unknown fields even when parsing aborts mid-message.
16935     private ReplicateWALEntryRequest(
16936         com.google.protobuf.CodedInputStream input,
16937         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16938         throws com.google.protobuf.InvalidProtocolBufferException {
16939       initFields();
16940       int mutable_bitField0_ = 0;
16941       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
16942           com.google.protobuf.UnknownFieldSet.newBuilder();
16943       try {
16944         boolean done = false;
16945         while (!done) {
16946           int tag = input.readTag();
16947           switch (tag) {
16948             case 0:
16949               done = true;
16950               break;
16951             default: {
16952               if (!parseUnknownField(input, unknownFields,
16953                                      extensionRegistry, tag)) {
16954                 done = true;
16955               }
16956               break;
16957             }
16958             case 10: {
16959               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
16960                 entry_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry>();
16961                 mutable_bitField0_ |= 0x00000001;
16962               }
16963               entry_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.PARSER, extensionRegistry));
16964               break;
16965             }
16966           }
16967         }
16968       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16969         throw e.setUnfinishedMessage(this);
16970       } catch (java.io.IOException e) {
16971         throw new com.google.protobuf.InvalidProtocolBufferException(
16972             e.getMessage()).setUnfinishedMessage(this);
16973       } finally {
16974         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
16975           entry_ = java.util.Collections.unmodifiableList(entry_);
16976         }
16977         this.unknownFields = unknownFields.build();
16978         makeExtensionsImmutable();
16979       }
16980     }
    // Descriptor/accessor-table wiring plus the message PARSER singleton, which simply
    // delegates to the parsing constructor above.
16981     public static final com.google.protobuf.Descriptors.Descriptor
16982         getDescriptor() {
16983       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor;
16984     }
16985 
16986     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16987         internalGetFieldAccessorTable() {
16988       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable
16989           .ensureFieldAccessorsInitialized(
16990               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class);
16991     }
16992 
16993     public static com.google.protobuf.Parser<ReplicateWALEntryRequest> PARSER =
16994         new com.google.protobuf.AbstractParser<ReplicateWALEntryRequest>() {
16995       public ReplicateWALEntryRequest parsePartialFrom(
16996           com.google.protobuf.CodedInputStream input,
16997           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16998           throws com.google.protobuf.InvalidProtocolBufferException {
16999         return new ReplicateWALEntryRequest(input, extensionRegistry);
17000       }
17001     };
17002 
17003     @java.lang.Override
17004     public com.google.protobuf.Parser<ReplicateWALEntryRequest> getParserForType() {
17005       return PARSER;
17006     }
17007 
    // Read accessors for repeated field `entry`. entry_ is already an unmodifiable list
    // on a built message (frozen in the parse ctor / buildPartial), so it is returned
    // directly without a defensive copy.
17008     // repeated .hbase.pb.WALEntry entry = 1;
17009     public static final int ENTRY_FIELD_NUMBER = 1;
17010     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> entry_;
17011     /**
17012      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
17013      */
17014     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> getEntryList() {
17015       return entry_;
17016     }
17017     /**
17018      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
17019      */
17020     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> 
17021         getEntryOrBuilderList() {
17022       return entry_;
17023     }
17024     /**
17025      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
17026      */
17027     public int getEntryCount() {
17028       return entry_.size();
17029     }
17030     /**
17031      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
17032      */
17033     public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) {
17034       return entry_.get(index);
17035     }
17036     /**
17037      * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
17038      */
17039     public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
17040         int index) {
17041       return entry_.get(index);
17042     }
17043 
    // initFields sets proto defaults; isInitialized memoizes (-1 unset, 0 false, 1 true)
    // and requires every nested WALEntry to be initialized (each has a required key).
17044     private void initFields() {
17045       entry_ = java.util.Collections.emptyList();
17046     }
17047     private byte memoizedIsInitialized = -1;
17048     public final boolean isInitialized() {
17049       byte isInitialized = memoizedIsInitialized;
17050       if (isInitialized != -1) return isInitialized == 1;
17051 
17052       for (int i = 0; i < getEntryCount(); i++) {
17053         if (!getEntry(i).isInitialized()) {
17054           memoizedIsInitialized = 0;
17055           return false;
17056         }
17057       }
17058       memoizedIsInitialized = 1;
17059       return true;
17060     }
17061 
    // Serialization: writeTo calls getSerializedSize() first to populate nested
    // memoized sizes (protobuf invariant), then emits each entry as field 1 followed by
    // unknown fields. getSerializedSize memoizes in memoizedSerializedSize (-1 = unset).
17062     public void writeTo(com.google.protobuf.CodedOutputStream output)
17063                         throws java.io.IOException {
17064       getSerializedSize();
17065       for (int i = 0; i < entry_.size(); i++) {
17066         output.writeMessage(1, entry_.get(i));
17067       }
17068       getUnknownFields().writeTo(output);
17069     }
17070 
17071     private int memoizedSerializedSize = -1;
17072     public int getSerializedSize() {
17073       int size = memoizedSerializedSize;
17074       if (size != -1) return size;
17075 
17076       size = 0;
17077       for (int i = 0; i < entry_.size(); i++) {
17078         size += com.google.protobuf.CodedOutputStream
17079           .computeMessageSize(1, entry_.get(i));
17080       }
17081       size += getUnknownFields().getSerializedSize();
17082       memoizedSerializedSize = size;
17083       return size;
17084     }
17085 
    // Java-serialization hook: defers to GeneratedMessage.writeReplace so messages
    // serialize via their proto wire form rather than field-by-field reflection.
17086     private static final long serialVersionUID = 0L;
17087     @java.lang.Override
17088     protected java.lang.Object writeReplace()
17089         throws java.io.ObjectStreamException {
17090       return super.writeReplace();
17091     }
17092 
    // Generated value equality over the entry list + unknown fields, with the matching
    // descriptor-seeded memoized hashCode (memoizedHashCode == 0 means "not computed";
    // benign race, standard for generated messages).
17093     @java.lang.Override
17094     public boolean equals(final java.lang.Object obj) {
17095       if (obj == this) {
17096        return true;
17097       }
17098       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)) {
17099         return super.equals(obj);
17100       }
17101       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj;
17102 
17103       boolean result = true;
17104       result = result && getEntryList()
17105           .equals(other.getEntryList());
17106       result = result &&
17107           getUnknownFields().equals(other.getUnknownFields());
17108       return result;
17109     }
17110 
17111     private int memoizedHashCode = 0;
17112     @java.lang.Override
17113     public int hashCode() {
17114       if (memoizedHashCode != 0) {
17115         return memoizedHashCode;
17116       }
17117       int hash = 41;
17118       hash = (19 * hash) + getDescriptorForType().hashCode();
17119       if (getEntryCount() > 0) {
17120         hash = (37 * hash) + ENTRY_FIELD_NUMBER;
17121         hash = (53 * hash) + getEntryList().hashCode();
17122       }
17123       hash = (29 * hash) + getUnknownFields().hashCode();
17124       memoizedHashCode = hash;
17125       return hash;
17126     }
17127 
    // Standard generated static parse entry points — all thin delegations to PARSER for
    // the supported input forms (ByteString, byte[], InputStream, delimited,
    // CodedInputStream), each with and without an extension registry.
17128     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
17129         com.google.protobuf.ByteString data)
17130         throws com.google.protobuf.InvalidProtocolBufferException {
17131       return PARSER.parseFrom(data);
17132     }
17133     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
17134         com.google.protobuf.ByteString data,
17135         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17136         throws com.google.protobuf.InvalidProtocolBufferException {
17137       return PARSER.parseFrom(data, extensionRegistry);
17138     }
17139     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data)
17140         throws com.google.protobuf.InvalidProtocolBufferException {
17141       return PARSER.parseFrom(data);
17142     }
17143     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
17144         byte[] data,
17145         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17146         throws com.google.protobuf.InvalidProtocolBufferException {
17147       return PARSER.parseFrom(data, extensionRegistry);
17148     }
17149     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input)
17150         throws java.io.IOException {
17151       return PARSER.parseFrom(input);
17152     }
17153     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
17154         java.io.InputStream input,
17155         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17156         throws java.io.IOException {
17157       return PARSER.parseFrom(input, extensionRegistry);
17158     }
17159     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input)
17160         throws java.io.IOException {
17161       return PARSER.parseDelimitedFrom(input);
17162     }
17163     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(
17164         java.io.InputStream input,
17165         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17166         throws java.io.IOException {
17167       return PARSER.parseDelimitedFrom(input, extensionRegistry);
17168     }
17169     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
17170         com.google.protobuf.CodedInputStream input)
17171         throws java.io.IOException {
17172       return PARSER.parseFrom(input);
17173     }
17174     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
17175         com.google.protobuf.CodedInputStream input,
17176         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17177         throws java.io.IOException {
17178       return PARSER.parseFrom(input, extensionRegistry);
17179     }
17180 
    // Builder factory methods: fresh builder, builder seeded from a prototype
    // (newBuilder(prototype) merges it in), and toBuilder() for round-tripping.
17181     public static Builder newBuilder() { return Builder.create(); }
17182     public Builder newBuilderForType() { return newBuilder(); }
17183     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) {
17184       return newBuilder().mergeFrom(prototype);
17185     }
17186     public Builder toBuilder() { return newBuilder(this); }
17187 
17188     @java.lang.Override
17189     protected Builder newBuilderForType(
17190         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17191       Builder builder = new Builder(parent);
17192       return builder;
17193     }
    /**
     * Protobuf type {@code hbase.pb.ReplicateWALEntryRequest}
     *
     * <pre>
     **
     * Replicates the given entries. The guarantee is that the given entries
     * will be durable on the slave cluster if this method returns without
     * any exception.  hbase.replication has to be set to true for this to work.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder {
      // Message descriptor, resolved from the file-level descriptor table.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the protobuf runtime
      // requests it (alwaysUseFieldBuilders is set only by the runtime).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getEntryFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets the builder to the default (empty) state.
      public Builder clear() {
        super.clear();
        if (entryBuilder_ == null) {
          entry_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          entryBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
      }

      // Builds the message; throws if any required sub-field is missing.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the initialization check; the repeated entry list is
      // frozen via unmodifiableList on first build so the message is immutable.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this);
        int from_bitField0_ = bitField0_;
        if (entryBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            entry_ = java.util.Collections.unmodifiableList(entry_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.entry_ = entry_;
        } else {
          result.entry_ = entryBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges another request's entries into this builder. When this builder's
      // list is still empty it aliases the other message's (immutable) list to
      // avoid a copy; otherwise it copies elements into a mutable list.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this;
        if (entryBuilder_ == null) {
          if (!other.entry_.isEmpty()) {
            if (entry_.isEmpty()) {
              entry_ = other.entry_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureEntryIsMutable();
              entry_.addAll(other.entry_);
            }
            onChanged();
          }
        } else {
          if (!other.entry_.isEmpty()) {
            if (entryBuilder_.isEmpty()) {
              entryBuilder_.dispose();
              entryBuilder_ = null;
              entry_ = other.entry_;
              bitField0_ = (bitField0_ & ~0x00000001);
              entryBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getEntryFieldBuilder() : null;
            } else {
              entryBuilder_.addAllMessages(other.entry_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Initialized iff every WALEntry element is itself initialized.
      public final boolean isInitialized() {
        for (int i = 0; i < getEntryCount(); i++) {
          if (!getEntry(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }

      // Parses from the stream and merges the result into this builder. On a
      // parse failure, any partially parsed message is still merged (finally
      // block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0x00000001 tracks whether entry_ is a private mutable copy.
      private int bitField0_;

      // repeated .hbase.pb.WALEntry entry = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> entry_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replaces a possibly-shared list with a mutable ArrayList.
      private void ensureEntryIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          entry_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry>(entry_);
          bitField0_ |= 0x00000001;
         }
      }

      // Lazily created; once non-null it owns the repeated field and entry_ is nulled.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> entryBuilder_;

      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> getEntryList() {
        if (entryBuilder_ == null) {
          return java.util.Collections.unmodifiableList(entry_);
        } else {
          return entryBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public int getEntryCount() {
        if (entryBuilder_ == null) {
          return entry_.size();
        } else {
          return entryBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) {
        if (entryBuilder_ == null) {
          return entry_.get(index);
        } else {
          return entryBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder setEntry(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) {
        if (entryBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureEntryIsMutable();
          entry_.set(index, value);
          onChanged();
        } else {
          entryBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder setEntry(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) {
        if (entryBuilder_ == null) {
          ensureEntryIsMutable();
          entry_.set(index, builderForValue.build());
          onChanged();
        } else {
          entryBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder addEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) {
        if (entryBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureEntryIsMutable();
          entry_.add(value);
          onChanged();
        } else {
          entryBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder addEntry(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) {
        if (entryBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureEntryIsMutable();
          entry_.add(index, value);
          onChanged();
        } else {
          entryBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder addEntry(
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) {
        if (entryBuilder_ == null) {
          ensureEntryIsMutable();
          entry_.add(builderForValue.build());
          onChanged();
        } else {
          entryBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder addEntry(
          int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) {
        if (entryBuilder_ == null) {
          ensureEntryIsMutable();
          entry_.add(index, builderForValue.build());
          onChanged();
        } else {
          entryBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder addAllEntry(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> values) {
        if (entryBuilder_ == null) {
          ensureEntryIsMutable();
          super.addAll(values, entry_);
          onChanged();
        } else {
          entryBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder clearEntry() {
        if (entryBuilder_ == null) {
          entry_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          entryBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public Builder removeEntry(int index) {
        if (entryBuilder_ == null) {
          ensureEntryIsMutable();
          entry_.remove(index);
          onChanged();
        } else {
          entryBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getEntryBuilder(
          int index) {
        return getEntryFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
          int index) {
        if (entryBuilder_ == null) {
          return entry_.get(index);  } else {
          return entryBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> 
           getEntryOrBuilderList() {
        if (entryBuilder_ != null) {
          return entryBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(entry_);
        }
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder() {
        return getEntryFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder(
          int index) {
        return getEntryFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.WALEntry entry = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder> 
           getEntryBuilderList() {
        return getEntryFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder; ownership of the list
      // transfers to the builder and entry_ is nulled out.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> 
          getEntryFieldBuilder() {
        if (entryBuilder_ == null) {
          entryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>(
                  entry_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          entry_ = null;
        }
        return entryBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicateWALEntryRequest)
    }
17598 
    // Class initializer: creates the shared default (empty) instance.
    static {
      defaultInstance = new ReplicateWALEntryRequest(true);
      defaultInstance.initFields();
    }
17603 
17604     // @@protoc_insertion_point(class_scope:hbase.pb.ReplicateWALEntryRequest)
17605   }
17606 
  // Read-only accessor interface for ReplicateWALEntryResponse.
  // The message declares no fields, so only MessageOrBuilder methods apply.
  public interface ReplicateWALEntryResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
17610   /**
17611    * Protobuf type {@code hbase.pb.ReplicateWALEntryResponse}
17612    */
17613   public static final class ReplicateWALEntryResponse extends
17614       com.google.protobuf.GeneratedMessage
17615       implements ReplicateWALEntryResponseOrBuilder {
    // Use ReplicateWALEntryResponse.newBuilder() to construct.
    private ReplicateWALEntryResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance (no unknown fields).
    private ReplicateWALEntryResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17622 
    // Shared immutable default instance, created in the static initializer.
    private static final ReplicateWALEntryResponse defaultInstance;
    public static ReplicateWALEntryResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ReplicateWALEntryResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields parsed from the wire that this message type does not declare.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. The message has no declared fields,
    // so every non-zero tag is routed into the unknown-field set; tag 0
    // signals end of input. The unknown fields are frozen in the finally
    // block even when parsing throws.
    private ReplicateWALEntryResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers see a protobuf parse error.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Message descriptor, resolved from the file-level descriptor table.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class);
    }
17683 
    // Parser singleton backing all static parseFrom methods.
    // NOTE(review): field is a mutable public static (not final) — this is
    // what this generator version emits; later protoc versions mark it final.
    public static com.google.protobuf.Parser<ReplicateWALEntryResponse> PARSER =
        new com.google.protobuf.AbstractParser<ReplicateWALEntryResponse>() {
      public ReplicateWALEntryResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReplicateWALEntryResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReplicateWALEntryResponse> getParserForType() {
      return PARSER;
    }
17698 
    // No declared fields, so nothing to initialize.
    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields: always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
17709 
    // Serializes to the wire: only unknown fields, since none are declared.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
17726 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the GeneratedMessage proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
17733 
    // With no declared fields, equality reduces to comparing unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash over descriptor and unknown fields; memoized (0 = not computed).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
17762 
    // --- Static parse entry points: all delegate to PARSER. ---
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Length-delimited variants read a varint size prefix before the payload.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
17815 
    // Creates a fresh, empty builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a builder pre-populated with a copy of the given prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Creates a builder pre-populated from this instance.
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
17829     /**
17830      * Protobuf type {@code hbase.pb.ReplicateWALEntryResponse}
17831      */
17832     public static final class Builder extends
17833         com.google.protobuf.GeneratedMessage.Builder<Builder>
17834        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder {
      // Message descriptor, resolved from the file-level descriptor table.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class);
      }
17846 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested field builders to initialize for this fieldless message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
17864 
17865       public Builder clear() {
17866         super.clear();
17867         return this;
17868       }
17869 
17870       public Builder clone() {
17871         return create().mergeFrom(buildPartial());
17872       }
17873 
17874       public com.google.protobuf.Descriptors.Descriptor
17875           getDescriptorForType() {
17876         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor;
17877       }
17878 
17879       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() {
17880         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
17881       }
17882 
      // Builds the message, throwing if it is uninitialized; isInitialized()
      // always returns true here (no required fields), so the throw is
      // effectively unreachable but kept for the generated-code contract.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
17890 
      // Builds without the initialization check; onBuilt() notifies any parent
      // builder that this nested build completed.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this);
        onBuilt();
        return result;
      }
17896 
      // Dynamic dispatch: use the typed merge when the argument is the same
      // message type, otherwise fall back to the reflection-based merge.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
17905 
      // Typed merge: with no declared fields, only unknown fields carry over.
      // Merging the default instance is a no-op shortcut.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
17911 
      // No required fields, so the builder is always initialized.
      public final boolean isInitialized() {
        return true;
      }
17915 
      // Parses from a stream and merges into this builder. If parsing fails,
      // the partially parsed message (if any) is still merged in the finally
      // block before rethrowing, so successfully read data is not lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
17933 
17934       // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicateWALEntryResponse)
17935     }
17936 
    // Class initializer: creates and initializes the shared default instance
    // returned by getDefaultInstance().
    static {
      defaultInstance = new ReplicateWALEntryResponse(true);
      defaultInstance.initFields();
    }
17941 
17942     // @@protoc_insertion_point(class_scope:hbase.pb.ReplicateWALEntryResponse)
17943   }
17944 
  // Accessor interface for hbase.pb.RollWALWriterRequest; empty because the
  // message declares no fields.
  public interface RollWALWriterRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
17948   /**
17949    * Protobuf type {@code hbase.pb.RollWALWriterRequest}
17950    */
17951   public static final class RollWALWriterRequest extends
17952       com.google.protobuf.GeneratedMessage
17953       implements RollWALWriterRequestOrBuilder {
17954     // Use RollWALWriterRequest.newBuilder() to construct.
17955     private RollWALWriterRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
17956       super(builder);
17957       this.unknownFields = builder.getUnknownFields();
17958     }
17959     private RollWALWriterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17960 
17961     private static final RollWALWriterRequest defaultInstance;
17962     public static RollWALWriterRequest getDefaultInstance() {
17963       return defaultInstance;
17964     }
17965 
17966     public RollWALWriterRequest getDefaultInstanceForType() {
17967       return defaultInstance;
17968     }
17969 
17970     private final com.google.protobuf.UnknownFieldSet unknownFields;
17971     @java.lang.Override
17972     public final com.google.protobuf.UnknownFieldSet
17973         getUnknownFields() {
17974       return this.unknownFields;
17975     }
17976     private RollWALWriterRequest(
17977         com.google.protobuf.CodedInputStream input,
17978         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17979         throws com.google.protobuf.InvalidProtocolBufferException {
17980       initFields();
17981       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
17982           com.google.protobuf.UnknownFieldSet.newBuilder();
17983       try {
17984         boolean done = false;
17985         while (!done) {
17986           int tag = input.readTag();
17987           switch (tag) {
17988             case 0:
17989               done = true;
17990               break;
17991             default: {
17992               if (!parseUnknownField(input, unknownFields,
17993                                      extensionRegistry, tag)) {
17994                 done = true;
17995               }
17996               break;
17997             }
17998           }
17999         }
18000       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18001         throw e.setUnfinishedMessage(this);
18002       } catch (java.io.IOException e) {
18003         throw new com.google.protobuf.InvalidProtocolBufferException(
18004             e.getMessage()).setUnfinishedMessage(this);
18005       } finally {
18006         this.unknownFields = unknownFields.build();
18007         makeExtensionsImmutable();
18008       }
18009     }
18010     public static final com.google.protobuf.Descriptors.Descriptor
18011         getDescriptor() {
18012       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_descriptor;
18013     }
18014 
18015     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18016         internalGetFieldAccessorTable() {
18017       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable
18018           .ensureFieldAccessorsInitialized(
18019               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class);
18020     }
18021 
18022     public static com.google.protobuf.Parser<RollWALWriterRequest> PARSER =
18023         new com.google.protobuf.AbstractParser<RollWALWriterRequest>() {
18024       public RollWALWriterRequest parsePartialFrom(
18025           com.google.protobuf.CodedInputStream input,
18026           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18027           throws com.google.protobuf.InvalidProtocolBufferException {
18028         return new RollWALWriterRequest(input, extensionRegistry);
18029       }
18030     };
18031 
18032     @java.lang.Override
18033     public com.google.protobuf.Parser<RollWALWriterRequest> getParserForType() {
18034       return PARSER;
18035     }
18036 
18037     private void initFields() {
18038     }
18039     private byte memoizedIsInitialized = -1;
18040     public final boolean isInitialized() {
18041       byte isInitialized = memoizedIsInitialized;
18042       if (isInitialized != -1) return isInitialized == 1;
18043 
18044       memoizedIsInitialized = 1;
18045       return true;
18046     }
18047 
18048     public void writeTo(com.google.protobuf.CodedOutputStream output)
18049                         throws java.io.IOException {
18050       getSerializedSize();
18051       getUnknownFields().writeTo(output);
18052     }
18053 
18054     private int memoizedSerializedSize = -1;
18055     public int getSerializedSize() {
18056       int size = memoizedSerializedSize;
18057       if (size != -1) return size;
18058 
18059       size = 0;
18060       size += getUnknownFields().getSerializedSize();
18061       memoizedSerializedSize = size;
18062       return size;
18063     }
18064 
18065     private static final long serialVersionUID = 0L;
18066     @java.lang.Override
18067     protected java.lang.Object writeReplace()
18068         throws java.io.ObjectStreamException {
18069       return super.writeReplace();
18070     }
18071 
18072     @java.lang.Override
18073     public boolean equals(final java.lang.Object obj) {
18074       if (obj == this) {
18075        return true;
18076       }
18077       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)) {
18078         return super.equals(obj);
18079       }
18080       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj;
18081 
18082       boolean result = true;
18083       result = result &&
18084           getUnknownFields().equals(other.getUnknownFields());
18085       return result;
18086     }
18087 
18088     private int memoizedHashCode = 0;
18089     @java.lang.Override
18090     public int hashCode() {
18091       if (memoizedHashCode != 0) {
18092         return memoizedHashCode;
18093       }
18094       int hash = 41;
18095       hash = (19 * hash) + getDescriptorForType().hashCode();
18096       hash = (29 * hash) + getUnknownFields().hashCode();
18097       memoizedHashCode = hash;
18098       return hash;
18099     }
18100 
18101     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
18102         com.google.protobuf.ByteString data)
18103         throws com.google.protobuf.InvalidProtocolBufferException {
18104       return PARSER.parseFrom(data);
18105     }
18106     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
18107         com.google.protobuf.ByteString data,
18108         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18109         throws com.google.protobuf.InvalidProtocolBufferException {
18110       return PARSER.parseFrom(data, extensionRegistry);
18111     }
18112     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] data)
18113         throws com.google.protobuf.InvalidProtocolBufferException {
18114       return PARSER.parseFrom(data);
18115     }
18116     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
18117         byte[] data,
18118         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18119         throws com.google.protobuf.InvalidProtocolBufferException {
18120       return PARSER.parseFrom(data, extensionRegistry);
18121     }
18122     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input)
18123         throws java.io.IOException {
18124       return PARSER.parseFrom(input);
18125     }
18126     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
18127         java.io.InputStream input,
18128         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18129         throws java.io.IOException {
18130       return PARSER.parseFrom(input, extensionRegistry);
18131     }
18132     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input)
18133         throws java.io.IOException {
18134       return PARSER.parseDelimitedFrom(input);
18135     }
18136     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(
18137         java.io.InputStream input,
18138         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18139         throws java.io.IOException {
18140       return PARSER.parseDelimitedFrom(input, extensionRegistry);
18141     }
18142     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
18143         com.google.protobuf.CodedInputStream input)
18144         throws java.io.IOException {
18145       return PARSER.parseFrom(input);
18146     }
18147     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
18148         com.google.protobuf.CodedInputStream input,
18149         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18150         throws java.io.IOException {
18151       return PARSER.parseFrom(input, extensionRegistry);
18152     }
18153 
18154     public static Builder newBuilder() { return Builder.create(); }
18155     public Builder newBuilderForType() { return newBuilder(); }
18156     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) {
18157       return newBuilder().mergeFrom(prototype);
18158     }
18159     public Builder toBuilder() { return newBuilder(this); }
18160 
18161     @java.lang.Override
18162     protected Builder newBuilderForType(
18163         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18164       Builder builder = new Builder(parent);
18165       return builder;
18166     }
18167     /**
18168      * Protobuf type {@code hbase.pb.RollWALWriterRequest}
18169      */
18170     public static final class Builder extends
18171         com.google.protobuf.GeneratedMessage.Builder<Builder>
18172        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder {
18173       public static final com.google.protobuf.Descriptors.Descriptor
18174           getDescriptor() {
18175         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_descriptor;
18176       }
18177 
18178       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18179           internalGetFieldAccessorTable() {
18180         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable
18181             .ensureFieldAccessorsInitialized(
18182                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class);
18183       }
18184 
18185       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder()
18186       private Builder() {
18187         maybeForceBuilderInitialization();
18188       }
18189 
18190       private Builder(
18191           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18192         super(parent);
18193         maybeForceBuilderInitialization();
18194       }
18195       private void maybeForceBuilderInitialization() {
18196         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18197         }
18198       }
18199       private static Builder create() {
18200         return new Builder();
18201       }
18202 
18203       public Builder clear() {
18204         super.clear();
18205         return this;
18206       }
18207 
18208       public Builder clone() {
18209         return create().mergeFrom(buildPartial());
18210       }
18211 
18212       public com.google.protobuf.Descriptors.Descriptor
18213           getDescriptorForType() {
18214         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_descriptor;
18215       }
18216 
18217       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() {
18218         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
18219       }
18220 
18221       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() {
18222         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial();
18223         if (!result.isInitialized()) {
18224           throw newUninitializedMessageException(result);
18225         }
18226         return result;
18227       }
18228 
18229       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() {
18230         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this);
18231         onBuilt();
18232         return result;
18233       }
18234 
18235       public Builder mergeFrom(com.google.protobuf.Message other) {
18236         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) {
18237           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other);
18238         } else {
18239           super.mergeFrom(other);
18240           return this;
18241         }
18242       }
18243 
18244       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) {
18245         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this;
18246         this.mergeUnknownFields(other.getUnknownFields());
18247         return this;
18248       }
18249 
18250       public final boolean isInitialized() {
18251         return true;
18252       }
18253 
18254       public Builder mergeFrom(
18255           com.google.protobuf.CodedInputStream input,
18256           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18257           throws java.io.IOException {
18258         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parsedMessage = null;
18259         try {
18260           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18261         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18262           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) e.getUnfinishedMessage();
18263           throw e;
18264         } finally {
18265           if (parsedMessage != null) {
18266             mergeFrom(parsedMessage);
18267           }
18268         }
18269         return this;
18270       }
18271 
18272       // @@protoc_insertion_point(builder_scope:hbase.pb.RollWALWriterRequest)
18273     }
18274 
18275     static {
18276       defaultInstance = new RollWALWriterRequest(true);
18277       defaultInstance.initFields();
18278     }
18279 
18280     // @@protoc_insertion_point(class_scope:hbase.pb.RollWALWriterRequest)
18281   }
18282 
  // Read-only accessor interface for hbase.pb.RollWALWriterResponse, exposing
  // the single repeated bytes field region_to_flush.
  public interface RollWALWriterResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes region_to_flush = 1;
    /**
     * <code>repeated bytes region_to_flush = 1;</code>
     *
     * <pre>
     * A list of encoded name of regions to flush
     * </pre>
     */
    java.util.List<com.google.protobuf.ByteString> getRegionToFlushList();
    /**
     * <code>repeated bytes region_to_flush = 1;</code>
     *
     * <pre>
     * A list of encoded name of regions to flush
     * </pre>
     */
    int getRegionToFlushCount();
    /**
     * <code>repeated bytes region_to_flush = 1;</code>
     *
     * <pre>
     * A list of encoded name of regions to flush
     * </pre>
     */
    com.google.protobuf.ByteString getRegionToFlush(int index);
  }
18312   /**
18313    * Protobuf type {@code hbase.pb.RollWALWriterResponse}
18314    *
18315    * <pre>
18316    *
18317    * Roll request responses no longer include regions to flush
18318    * this list will always be empty when talking to a 1.0 server
18319    * </pre>
18320    */
18321   public static final class RollWALWriterResponse extends
18322       com.google.protobuf.GeneratedMessage
18323       implements RollWALWriterResponseOrBuilder {
    // Use RollWALWriterResponse.newBuilder() to construct.
    private RollWALWriterResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the shared default instance.
    private RollWALWriterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
18330 
    // Shared immutable default instance, assigned in the static initializer.
    private static final RollWALWriterResponse defaultInstance;
    public static RollWALWriterResponse getDefaultInstance() {
      return defaultInstance;
    }

    public RollWALWriterResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
18339 
    // Unknown fields read from the wire; preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Tag 10 (field 1, wire type 2) appends to
    // region_to_flush_, which is allocated lazily on first occurrence; tag 0
    // ends the message; anything else is preserved as an unknown field.
    // Note: the position of the default label before case 10 has no effect on
    // Java switch semantics.
    private RollWALWriterResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                regionToFlush_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000001;
              }
              regionToFlush_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field (if populated) and capture unknown fields,
        // even when parsing fails part-way through.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor and reflection accessor table for hbase.pb.RollWALWriterResponse.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class);
    }
18403 
    // Stateless parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<RollWALWriterResponse> PARSER =
        new com.google.protobuf.AbstractParser<RollWALWriterResponse>() {
      public RollWALWriterResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RollWALWriterResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RollWALWriterResponse> getParserForType() {
      return PARSER;
    }
18418 
    // repeated bytes region_to_flush = 1;
    public static final int REGION_TO_FLUSH_FIELD_NUMBER = 1;
    // Backing list; empty by default (initFields) and made unmodifiable by the
    // parsing constructor when populated.
    private java.util.List<com.google.protobuf.ByteString> regionToFlush_;
    /**
     * <code>repeated bytes region_to_flush = 1;</code>
     *
     * <pre>
     * A list of encoded name of regions to flush
     * </pre>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getRegionToFlushList() {
      return regionToFlush_;
    }
    /**
     * <code>repeated bytes region_to_flush = 1;</code>
     *
     * <pre>
     * A list of encoded name of regions to flush
     * </pre>
     */
    public int getRegionToFlushCount() {
      return regionToFlush_.size();
    }
    /**
     * <code>repeated bytes region_to_flush = 1;</code>
     *
     * <pre>
     * A list of encoded name of regions to flush
     * </pre>
     */
    public com.google.protobuf.ByteString getRegionToFlush(int index) {
      return regionToFlush_.get(index);
    }
18453 
    // Default state: empty region list.
    private void initFields() {
      regionToFlush_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
18465 
    // Serializes each region name as field 1, then any unknown fields.
    // getSerializedSize() is called first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < regionToFlush_.size(); i++) {
        output.writeBytes(1, regionToFlush_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
18474 
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        // Sum of length-delimited payloads plus one tag byte per element.
        int dataSize = 0;
        for (int i = 0; i < regionToFlush_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(regionToFlush_.get(i));
        }
        size += dataSize;
        size += 1 * getRegionToFlushList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
18494 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
18501 
    // Value equality over the region list and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj;

      boolean result = true;
      result = result && getRegionToFlushList()
          .equals(other.getRegionToFlushList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
18519 
    // Memoized hash consistent with equals; field 1 contributes only when set.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getRegionToFlushCount() > 0) {
        hash = (37 * hash) + REGION_TO_FLUSH_FIELD_NUMBER;
        hash = (53 * hash) + getRegionToFlushList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
18536 
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
18589 
18590     public static Builder newBuilder() { return Builder.create(); }
18591     public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with a copy of {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
18595     public Builder toBuilder() { return newBuilder(this); }
18596 
    // Creates a child builder wired to the given parent so nested-builder change
    // notifications propagate (protobuf generated-message plumbing).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
18603     /**
18604      * Protobuf type {@code hbase.pb.RollWALWriterResponse}
18605      *
18606      * <pre>
18607      *
18608      * Roll request responses no longer include regions to flush
18609      * this list will always be empty when talking to a 1.0 server
18610      * </pre>
18611      */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder {
      // Descriptor for hbase.pb.RollWALWriterResponse, shared with the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields to pre-create even when
      // the runtime forces eager field-builder initialization.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets the builder to the empty state (no regions to flush). */
      public Builder clear() {
        super.clear();
        regionToFlush_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
      }

      /** Builds the message, throwing if required fields are unset (none exist here). */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Freezes the repeated field into an unmodifiable list and hands ownership to
      // the new message; the builder's mutable-bit is cleared so a later write
      // re-copies the list instead of mutating the built message.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this);
        int from_bitField0_ = bitField0_;  // unused for this field-less-bit message; generated artifact
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.regionToFlush_ = regionToFlush_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Appends {@code other}'s region list to this builder (shares the list if ours is empty). */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this;
        if (!other.regionToFlush_.isEmpty()) {
          if (regionToFlush_.isEmpty()) {
            regionToFlush_ = other.regionToFlush_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRegionToFlushIsMutable();
            regionToFlush_.addAll(other.regionToFlush_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Always initialized: the message has no required fields.
      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated bytes region_to_flush = 1;
      private java.util.List<com.google.protobuf.ByteString> regionToFlush_ = java.util.Collections.emptyList();
      // Copy-on-write: bit 0 of bitField0_ records whether regionToFlush_ is our own
      // mutable ArrayList or a shared/immutable list that must be copied before writing.
      private void ensureRegionToFlushIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          regionToFlush_ = new java.util.ArrayList<com.google.protobuf.ByteString>(regionToFlush_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public java.util.List<com.google.protobuf.ByteString>
          getRegionToFlushList() {
        return java.util.Collections.unmodifiableList(regionToFlush_);
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public int getRegionToFlushCount() {
        return regionToFlush_.size();
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public com.google.protobuf.ByteString getRegionToFlush(int index) {
        return regionToFlush_.get(index);
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public Builder setRegionToFlush(
          int index, com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureRegionToFlushIsMutable();
        regionToFlush_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public Builder addRegionToFlush(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureRegionToFlushIsMutable();
        regionToFlush_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public Builder addAllRegionToFlush(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureRegionToFlushIsMutable();
        super.addAll(values, regionToFlush_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes region_to_flush = 1;</code>
       *
       * <pre>
       * A list of encoded name of regions to flush
       * </pre>
       */
      public Builder clearRegionToFlush() {
        regionToFlush_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.RollWALWriterResponse)
    }
18835 
    // Singleton default instance, created via the no-parse constructor and then
    // populated with field defaults.
    static {
      defaultInstance = new RollWALWriterResponse(true);
      defaultInstance.initFields();
    }
18840 
18841     // @@protoc_insertion_point(class_scope:hbase.pb.RollWALWriterResponse)
18842   }
18843 
  /** Read-only accessor interface implemented by both {@code StopServerRequest} and its Builder. */
  public interface StopServerRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string reason = 1;
    /**
     * <code>required string reason = 1;</code>
     */
    boolean hasReason();
    /**
     * <code>required string reason = 1;</code>
     */
    java.lang.String getReason();
    /**
     * <code>required string reason = 1;</code>
     */
    com.google.protobuf.ByteString
        getReasonBytes();
  }
18862   /**
18863    * Protobuf type {@code hbase.pb.StopServerRequest}
18864    */
  public static final class StopServerRequest extends
      com.google.protobuf.GeneratedMessage
      implements StopServerRequestOrBuilder {
    // Use StopServerRequest.newBuilder() to construct.
    private StopServerRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-parse constructor used only to create the singleton default instance.
    private StopServerRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final StopServerRequest defaultInstance;
    public static StopServerRequest getDefaultInstance() {
      return defaultInstance;
    }

    public StopServerRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until EOF (tag 0), storing
    // unrecognized fields in unknownFields.
    private StopServerRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the generator emits `default:` before `case 10:`; Java switch
          // matching is by label value, so the ordering has no effect.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (reason), wire type 2: kept as ByteString; UTF-8 decoding
              // is deferred until getReason() is first called.
              bitField0_ |= 0x00000001;
              reason_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<StopServerRequest> PARSER =
        new com.google.protobuf.AbstractParser<StopServerRequest>() {
      public StopServerRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StopServerRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StopServerRequest> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required string reason = 1;
    public static final int REASON_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; converted lazily in each direction.
    private java.lang.Object reason_;
    /**
     * <code>required string reason = 1;</code>
     */
    public boolean hasReason() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string reason = 1;</code>
     */
    public java.lang.String getReason() {
      java.lang.Object ref = reason_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the decoded String when the bytes are valid UTF-8, so the
        // original bytes are preserved for re-serialization otherwise.
        if (bs.isValidUtf8()) {
          reason_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string reason = 1;</code>
     */
    public com.google.protobuf.ByteString
        getReasonBytes() {
      java.lang.Object ref = reason_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        reason_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      reason_ = "";
    }
    // -1 = not computed yet, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasReason()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getReasonBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getReasonBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj;

      boolean result = true;
      result = result && (hasReason() == other.hasReason());
      if (hasReason()) {
        result = result && getReason()
            .equals(other.getReason());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReason()) {
        hash = (37 * hash) + REASON_FIELD_NUMBER;
        hash = (53 * hash) + getReason().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.StopServerRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op: this message has no sub-message fields to pre-create.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        reason_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
      }

      /** Builds the message, throwing if the required {@code reason} field is unset. */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.reason_ = reason_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this;
        if (other.hasReason()) {
          bitField0_ |= 0x00000001;
          reason_ = other.reason_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasReason()) {
          // required field reason (field 1) is not set
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string reason = 1;
      // Holds either a String or a ByteString; converted lazily in each direction.
      private java.lang.Object reason_ = "";
      /**
       * <code>required string reason = 1;</code>
       */
      public boolean hasReason() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string reason = 1;</code>
       */
      public java.lang.String getReason() {
        java.lang.Object ref = reason_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          reason_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string reason = 1;</code>
       */
      public com.google.protobuf.ByteString
          getReasonBytes() {
        java.lang.Object ref = reason_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          reason_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string reason = 1;</code>
       */
      public Builder setReason(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        reason_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string reason = 1;</code>
       */
      public Builder clearReason() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reason_ = getDefaultInstance().getReason();
        onChanged();
        return this;
      }
      /**
       * <code>required string reason = 1;</code>
       */
      public Builder setReasonBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        reason_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.StopServerRequest)
    }

    // Singleton default instance, created via the no-parse constructor.
    static {
      defaultInstance = new StopServerRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.StopServerRequest)
  }
19360 
  /**
   * Accessor contract shared by {@code hbase.pb.StopServerResponse} and its
   * Builder. The message declares no fields, so nothing is added beyond the
   * base {@link com.google.protobuf.MessageOrBuilder} contract.
   */
  public interface StopServerResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hbase.pb.StopServerResponse}
   *
   * <p>Empty acknowledgement message for the StopServer RPC. It carries no
   * declared fields; only unknown fields read from the wire are preserved.
   */
  public static final class StopServerResponse extends
      com.google.protobuf.GeneratedMessage
      implements StopServerResponseOrBuilder {
    // Use StopServerResponse.newBuilder() to construct.
    private StopServerResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the singleton default instance (see the static block at the
    // bottom of this class); starts with an empty unknown-field set.
    private StopServerResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the static initializer below.
    private static final StopServerResponse defaultInstance;
    public static StopServerResponse getDefaultInstance() {
      return defaultInstance;
    }

    public StopServerResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor. Since this message has no declared fields,
    // every tag is either end-of-message (0) or routed to the unknown-field
    // collector; parsing stops when parseUnknownField reports end of input.
    private StopServerResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Keep whatever was read so far, even when parsing failed above.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class);
    }

    // NOTE(review): PARSER is a mutable public static field; protobuf 2.5-era
    // codegen emits it this way, so it is left as generated in this file.
    public static com.google.protobuf.Parser<StopServerResponse> PARSER =
        new com.google.protobuf.AbstractParser<StopServerResponse>() {
      public StopServerResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StopServerResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StopServerResponse> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // -1 = not yet computed; otherwise caches 1 (initialized) / 0 (not).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // -1 = not yet computed; serialized size is just the unknown fields' size.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Equality is based solely on unknown fields (no declared fields exist).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 = not yet computed (a genuinely-zero hash would be recomputed each call).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parseFrom overloads; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.StopServerResponse}
     *
     * <p>Builder for the (field-less) StopServerResponse message.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields, so there are no nested builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merging copies only unknown fields; the message declares none of its own.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep the partially-parsed message so it can be merged in finally.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.StopServerResponse)
    }

    static {
      defaultInstance = new StopServerResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.StopServerResponse)
  }
19698 
  /**
   * Accessor contract shared by {@code hbase.pb.GetServerInfoRequest} and its
   * Builder. The message declares no fields, so nothing is added beyond the
   * base {@link com.google.protobuf.MessageOrBuilder} contract.
   */
  public interface GetServerInfoRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hbase.pb.GetServerInfoRequest}
   *
   * <p>Empty request message for the GetServerInfo RPC. It carries no declared
   * fields; only unknown fields read from the wire are preserved.
   */
  public static final class GetServerInfoRequest extends
      com.google.protobuf.GeneratedMessage
      implements GetServerInfoRequestOrBuilder {
    // Use GetServerInfoRequest.newBuilder() to construct.
    private GetServerInfoRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the singleton default instance (see the static block at the
    // bottom of this class); starts with an empty unknown-field set.
    private GetServerInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in the static initializer below.
    private static final GetServerInfoRequest defaultInstance;
    public static GetServerInfoRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetServerInfoRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor. Since this message has no declared fields,
    // every tag is either end-of-message (0) or routed to the unknown-field
    // collector; parsing stops when parseUnknownField reports end of input.
    private GetServerInfoRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Keep whatever was read so far, even when parsing failed above.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class);
    }

    // NOTE(review): PARSER is a mutable public static field; protobuf 2.5-era
    // codegen emits it this way, so it is left as generated in this file.
    public static com.google.protobuf.Parser<GetServerInfoRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetServerInfoRequest>() {
      public GetServerInfoRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetServerInfoRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetServerInfoRequest> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // -1 = not yet computed; otherwise caches 1 (initialized) / 0 (not).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // -1 = not yet computed; serialized size is just the unknown fields' size.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Equality is based solely on unknown fields (no declared fields exist).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 = not yet computed (a genuinely-zero hash would be recomputed each call).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parseFrom overloads; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.GetServerInfoRequest}
     *
     * <p>Builder for the (field-less) GetServerInfoRequest message.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields, so there are no nested builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merging copies only unknown fields; the message declares none of its own.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep the partially-parsed message so it can be merged in finally.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetServerInfoRequest)
    }

    static {
      defaultInstance = new GetServerInfoRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.GetServerInfoRequest)
  }
20036 
  /**
   * Read-only accessor interface for the {@code hbase.pb.ServerInfo} message,
   * implemented by both the immutable message and its Builder.
   */
  public interface ServerInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.ServerName server_name = 1;
    /**
     * <code>required .hbase.pb.ServerName server_name = 1;</code>
     */
    boolean hasServerName();
    /**
     * <code>required .hbase.pb.ServerName server_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName();
    /**
     * <code>required .hbase.pb.ServerName server_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder();

    // optional uint32 webui_port = 2;
    /**
     * <code>optional uint32 webui_port = 2;</code>
     */
    boolean hasWebuiPort();
    /**
     * <code>optional uint32 webui_port = 2;</code>
     */
    int getWebuiPort();
  }
20064   /**
20065    * Protobuf type {@code hbase.pb.ServerInfo}
20066    */
  public static final class ServerInfo extends
      com.google.protobuf.GeneratedMessage
      implements ServerInfoOrBuilder {
    // Use ServerInfo.newBuilder() to construct.
    private ServerInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer to build the singleton default instance.
    private ServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ServerInfo defaultInstance;
    public static ServerInfo getDefaultInstance() {
      return defaultInstance;
    }

    public ServerInfo getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: consumes the wire format until end-of-stream (tag 0)
    // or an end-group tag; unrecognized fields are preserved in unknownFields.
    private ServerInfo(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (server_name): merge into any previously read value.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = serverName_.toBuilder();
              }
              serverName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(serverName_);
                serverName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              // Field 2 (webui_port): varint uint32.
              bitField0_ |= 0x00000002;
              webuiPort_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Whatever was read is retained even on failure (partial message).
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class);
    }

    // Stream parser backing every parseFrom()/parseDelimitedFrom() overload below.
    public static com.google.protobuf.Parser<ServerInfo> PARSER =
        new com.google.protobuf.AbstractParser<ServerInfo>() {
      public ServerInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ServerInfo(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ServerInfo> getParserForType() {
      return PARSER;
    }

    // Presence bits: 0x1 = server_name set, 0x2 = webui_port set (see has*()).
    private int bitField0_;
    // required .hbase.pb.ServerName server_name = 1;
    public static final int SERVER_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_;
    /**
     * <code>required .hbase.pb.ServerName server_name = 1;</code>
     */
    public boolean hasServerName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.ServerName server_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
      return serverName_;
    }
    /**
     * <code>required .hbase.pb.ServerName server_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
      return serverName_;
    }

    // optional uint32 webui_port = 2;
    public static final int WEBUI_PORT_FIELD_NUMBER = 2;
    private int webuiPort_;
    /**
     * <code>optional uint32 webui_port = 2;</code>
     */
    public boolean hasWebuiPort() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 webui_port = 2;</code>
     */
    public int getWebuiPort() {
      return webuiPort_;
    }

    private void initFields() {
      serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      webuiPort_ = 0;
    }
    // Memoized result: -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // server_name is required and must itself be fully initialized.
      if (!hasServerName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServerName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, serverName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, webuiPort_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, serverName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, webuiPort_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field equality: presence flags, set values, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) obj;

      boolean result = true;
      result = result && (hasServerName() == other.hasServerName());
      if (hasServerName()) {
        result = result && getServerName()
            .equals(other.getServerName());
      }
      result = result && (hasWebuiPort() == other.hasWebuiPort());
      if (hasWebuiPort()) {
        result = result && (getWebuiPort()
            == other.getWebuiPort());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasServerName()) {
        hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getServerName().hashCode();
      }
      if (hasWebuiPort()) {
        hash = (37 * hash) + WEBUI_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getWebuiPort();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.ServerInfo}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getServerNameFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (serverNameBuilder_ == null) {
          serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        webuiPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without checking required fields; copies presence bits and values.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (serverNameBuilder_ == null) {
          result.serverName_ = serverName_;
        } else {
          result.serverName_ = serverNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.webuiPort_ = webuiPort_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) return this;
        if (other.hasServerName()) {
          mergeServerName(other.getServerName());
        }
        if (other.hasWebuiPort()) {
          setWebuiPort(other.getWebuiPort());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // server_name is required and must itself be initialized.
        if (!hasServerName()) {
          
          return false;
        }
        if (!getServerName().isInitialized()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover the partial message so it is still merged in finally.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .hbase.pb.ServerName server_name = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_;
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public boolean hasServerName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
        if (serverNameBuilder_ == null) {
          return serverName_;
        } else {
          return serverNameBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          serverName_ = value;
          onChanged();
        } else {
          serverNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public Builder setServerName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverNameBuilder_ == null) {
          serverName_ = builderForValue.build();
          onChanged();
        } else {
          serverNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverNameBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            serverName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial();
          } else {
            serverName_ = value;
          }
          onChanged();
        } else {
          serverNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public Builder clearServerName() {
        if (serverNameBuilder_ == null) {
          serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          serverNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getServerNameFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
        if (serverNameBuilder_ != null) {
          return serverNameBuilder_.getMessageOrBuilder();
        } else {
          return serverName_;
        }
      }
      /**
       * <code>required .hbase.pb.ServerName server_name = 1;</code>
       */
      // Lazily creates the nested-field builder; after creation serverName_ is
      // owned by the builder and the local reference is nulled out.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getServerNameFieldBuilder() {
        if (serverNameBuilder_ == null) {
          serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  serverName_,
                  getParentForChildren(),
                  isClean());
          serverName_ = null;
        }
        return serverNameBuilder_;
      }

      // optional uint32 webui_port = 2;
      private int webuiPort_ ;
      /**
       * <code>optional uint32 webui_port = 2;</code>
       */
      public boolean hasWebuiPort() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint32 webui_port = 2;</code>
       */
      public int getWebuiPort() {
        return webuiPort_;
      }
      /**
       * <code>optional uint32 webui_port = 2;</code>
       */
      public Builder setWebuiPort(int value) {
        bitField0_ |= 0x00000002;
        webuiPort_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 webui_port = 2;</code>
       */
      public Builder clearWebuiPort() {
        bitField0_ = (bitField0_ & ~0x00000002);
        webuiPort_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.ServerInfo)
    }

    // Eagerly creates and initializes the singleton default instance.
    static {
      defaultInstance = new ServerInfo(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.ServerInfo)
  }
20687 
  /**
   * Read-only accessor interface for the {@code hbase.pb.GetServerInfoResponse}
   * message, implemented by both the immutable message and its Builder.
   */
  public interface GetServerInfoResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.ServerInfo server_info = 1;
    /**
     * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
     */
    boolean hasServerInfo();
    /**
     * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo();
    /**
     * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder();
  }
20705   /**
20706    * Protobuf type {@code hbase.pb.GetServerInfoResponse}
20707    */
20708   public static final class GetServerInfoResponse extends
20709       com.google.protobuf.GeneratedMessage
20710       implements GetServerInfoResponseOrBuilder {
20711     // Use GetServerInfoResponse.newBuilder() to construct.
20712     private GetServerInfoResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
20713       super(builder);
20714       this.unknownFields = builder.getUnknownFields();
20715     }
20716     private GetServerInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
20717 
20718     private static final GetServerInfoResponse defaultInstance;
20719     public static GetServerInfoResponse getDefaultInstance() {
20720       return defaultInstance;
20721     }
20722 
20723     public GetServerInfoResponse getDefaultInstanceForType() {
20724       return defaultInstance;
20725     }
20726 
20727     private final com.google.protobuf.UnknownFieldSet unknownFields;
20728     @java.lang.Override
20729     public final com.google.protobuf.UnknownFieldSet
20730         getUnknownFields() {
20731       return this.unknownFields;
20732     }
    // Parses a serialized GetServerInfoResponse from the stream.
    // NOTE: the generated switch places the default branch before case 10;
    // Java switch dispatch does not depend on case order, so behavior is
    // unaffected.
    private GetServerInfoResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // tag 0 marks end of input
              done = true;
              break;
            default: {
              // Unrecognized field: stash it in unknownFields; a false return
              // means an end-group tag was hit, which also terminates parsing.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: { // field 1 (server_info), wire type 2 (length-delimited)
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field seen before: merge the new occurrence into the old one,
                // per proto2 semantics for repeated occurrences of a singular field.
                subBuilder = serverInfo_.toBuilder();
              }
              serverInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(serverInfo_);
                serverInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were collected, even when an
        // exception carries this partially-parsed message out.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, looked up from the outer AdminProtos
    // file descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_descriptor;
    }

    // Reflection table mapping descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class);
    }

    // Stateless parser that delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<GetServerInfoResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetServerInfoResponse>() {
      public GetServerInfoResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetServerInfoResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetServerInfoResponse> getParserForType() {
      return PARSER;
    }
20807 
    // Presence bits: bit 0 tracks whether server_info was set.
    private int bitField0_;
    // required .hbase.pb.ServerInfo server_info = 1;
    public static final int SERVER_INFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_;
    /**
     * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
     */
    public boolean hasServerInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() {
      return serverInfo_;
    }
    /**
     * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() {
      return serverInfo_;
    }

    // Resets fields to their proto defaults; called by the parsing
    // constructor and by the static initializer on the default instance.
    private void initFields() {
      serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // server_info is required and must itself be fully initialized.
      if (!hasServerInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServerInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
20850 
    // Serializes set fields plus any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // forces size memoization before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, serverInfo_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, serverInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's proto-based
    // serialized form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
20881 
    // Value equality: same presence and value of server_info, plus equal
    // unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj;

      boolean result = true;
      result = result && (hasServerInfo() == other.hasServerInfo());
      if (hasServerInfo()) {
        result = result && getServerInfo()
            .equals(other.getServerInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed (recomputed if hash happens to be 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasServerInfo()) {
        hash = (37 * hash) + SERVER_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getServerInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
20919 
    // Static parse entry points; all delegate to PARSER, which uses the
    // parsing constructor above.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
20972 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated from the given prototype message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent for nested-builder change
    // notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.GetServerInfoResponse}
     *
     * Mutable builder. The server_info field is held either directly in
     * serverInfo_ or, once a nested builder has been requested, inside
     * serverInfoBuilder_ — exactly one of the two is active at a time.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf test/debug hook).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getServerInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all fields to defaults and clears the presence bit.
      public Builder clear() {
        super.clear();
        if (serverInfoBuilder_ == null) {
          serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
        } else {
          serverInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
      }

      // Builds the message, throwing if the required server_info field is
      // missing or incomplete.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without enforcing required-field initialization; copies the
      // presence bit and whichever representation of server_info is active.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (serverInfoBuilder_ == null) {
          result.serverInfo_ = serverInfo_;
        } else {
          result.serverInfo_ = serverInfoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this;
        if (other.hasServerInfo()) {
          mergeServerInfo(other.getServerInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unlike the message's memoized isInitialized(), the builder recomputes
      // every call since its state is mutable.
      public final boolean isInitialized() {
        if (!hasServerInfo()) {
          
          return false;
        }
        if (!getServerInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }

      // Parses from the stream and merges the result into this builder;
      // on parse failure, any partially-parsed message is still merged in
      // (finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: bit 0 tracks server_info.
      private int bitField0_;

      // required .hbase.pb.ServerInfo server_info = 1;
      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
      // Lazily-created nested builder; once created it owns the field value
      // and serverInfo_ is nulled out (see getServerInfoFieldBuilder()).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_;
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public boolean hasServerInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() {
        if (serverInfoBuilder_ == null) {
          return serverInfo_;
        } else {
          return serverInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public Builder setServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) {
        if (serverInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          serverInfo_ = value;
          onChanged();
        } else {
          serverInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public Builder setServerInfo(
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder builderForValue) {
        if (serverInfoBuilder_ == null) {
          serverInfo_ = builderForValue.build();
          onChanged();
        } else {
          serverInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public Builder mergeServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) {
        if (serverInfoBuilder_ == null) {
          // Merge only if a non-default value is already present; otherwise
          // adopt the incoming value wholesale.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              serverInfo_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) {
            serverInfo_ =
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(serverInfo_).mergeFrom(value).buildPartial();
          } else {
            serverInfo_ = value;
          }
          onChanged();
        } else {
          serverInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public Builder clearServerInfo() {
        if (serverInfoBuilder_ == null) {
          serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
          onChanged();
        } else {
          serverInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder getServerInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getServerInfoFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() {
        if (serverInfoBuilder_ != null) {
          return serverInfoBuilder_.getMessageOrBuilder();
        } else {
          return serverInfo_;
        }
      }
      /**
       * <code>required .hbase.pb.ServerInfo server_info = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> 
          getServerInfoFieldBuilder() {
        if (serverInfoBuilder_ == null) {
          serverInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder>(
                  serverInfo_,
                  getParentForChildren(),
                  isClean());
          serverInfo_ = null;  // field builder now owns the value
        }
        return serverInfoBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetServerInfoResponse)
    }
21240 
    // Creates and initializes the singleton default instance once the class
    // is loaded.
    static {
      defaultInstance = new GetServerInfoResponse(true);
      defaultInstance.initFields();
    }
21245 
21246     // @@protoc_insertion_point(class_scope:hbase.pb.GetServerInfoResponse)
21247   }
21248 
  /**
   * Accessor interface for {@code hbase.pb.UpdateConfigurationRequest}.
   * Empty because the message declares no fields.
   */
  public interface UpdateConfigurationRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
21252   /**
21253    * Protobuf type {@code hbase.pb.UpdateConfigurationRequest}
21254    */
21255   public static final class UpdateConfigurationRequest extends
21256       com.google.protobuf.GeneratedMessage
21257       implements UpdateConfigurationRequestOrBuilder {
    // Use UpdateConfigurationRequest.newBuilder() to construct.
    // Adopts the builder's accumulated unknown fields.
    private UpdateConfigurationRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor: used only to create the singleton default instance.
    private UpdateConfigurationRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in this class's static initializer.
    private static final UpdateConfigurationRequest defaultInstance;
    public static UpdateConfigurationRequest getDefaultInstance() {
      return defaultInstance;
    }

    public UpdateConfigurationRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields received on the wire that are not defined in the schema;
    // preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a serialized UpdateConfigurationRequest. The message has no
    // declared fields, so every non-zero tag is routed to unknownFields.
    private UpdateConfigurationRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // tag 0 marks end of input
              done = true;
              break;
            default: {
              // Preserve the unrecognized field; a false return means an
              // end-group tag was hit, which also terminates parsing.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture collected unknown fields, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, from the outer AdminProtos file
    // descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_descriptor;
    }

    // Reflection table mapping descriptor fields to generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.Builder.class);
    }

    // Stateless parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<UpdateConfigurationRequest> PARSER =
        new com.google.protobuf.AbstractParser<UpdateConfigurationRequest>() {
      public UpdateConfigurationRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new UpdateConfigurationRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<UpdateConfigurationRequest> getParserForType() {
      return PARSER;
    }
21340 
    // No fields to reset; kept for structural parity with other messages.
    private void initFields() {
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so an empty message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
21351 
    // Serializes the message; with no declared fields, only preserved
    // unknown fields are written.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // forces size memoization before writing
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's proto-based form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
21375 
    // Value equality: with no declared fields, only unknown fields are
    // compared.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
21404 
21405     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
21406         com.google.protobuf.ByteString data)
21407         throws com.google.protobuf.InvalidProtocolBufferException {
21408       return PARSER.parseFrom(data);
21409     }
21410     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
21411         com.google.protobuf.ByteString data,
21412         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21413         throws com.google.protobuf.InvalidProtocolBufferException {
21414       return PARSER.parseFrom(data, extensionRegistry);
21415     }
21416     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(byte[] data)
21417         throws com.google.protobuf.InvalidProtocolBufferException {
21418       return PARSER.parseFrom(data);
21419     }
21420     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
21421         byte[] data,
21422         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21423         throws com.google.protobuf.InvalidProtocolBufferException {
21424       return PARSER.parseFrom(data, extensionRegistry);
21425     }
21426     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(java.io.InputStream input)
21427         throws java.io.IOException {
21428       return PARSER.parseFrom(input);
21429     }
21430     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
21431         java.io.InputStream input,
21432         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21433         throws java.io.IOException {
21434       return PARSER.parseFrom(input, extensionRegistry);
21435     }
21436     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseDelimitedFrom(java.io.InputStream input)
21437         throws java.io.IOException {
21438       return PARSER.parseDelimitedFrom(input);
21439     }
// Delimited variant with extension-registry support.
21440     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseDelimitedFrom(
21441         java.io.InputStream input,
21442         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21443         throws java.io.IOException {
21444       return PARSER.parseDelimitedFrom(input, extensionRegistry);
21445     }
// Parses directly from an already-constructed CodedInputStream.
21446     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
21447         com.google.protobuf.CodedInputStream input)
21448         throws java.io.IOException {
21449       return PARSER.parseFrom(input);
21450     }
// CodedInputStream variant with extension-registry support.
21451     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
21452         com.google.protobuf.CodedInputStream input,
21453         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21454         throws java.io.IOException {
21455       return PARSER.parseFrom(input, extensionRegistry);
21456     }
21457 
// Creates a fresh, empty Builder for this message type.
21458     public static Builder newBuilder() { return Builder.create(); }
// Instance-level hook required by the Message interface; delegates to the static factory.
21459     public Builder newBuilderForType() { return newBuilder(); }
// Creates a Builder pre-populated with the given prototype's contents.
21460     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest prototype) {
21461       return newBuilder().mergeFrom(prototype);
21462     }
// Returns a Builder initialized from this (immutable) message instance.
21463     public Builder toBuilder() { return newBuilder(this); }
21464 
// Framework hook used by GeneratedMessage to create a child builder attached
// to a parent (used for nested-builder change notification).
21465     @java.lang.Override
21466     protected Builder newBuilderForType(
21467         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21468       Builder builder = new Builder(parent);
21469       return builder;
21470     }
// Builder for the field-less UpdateConfigurationRequest message. Since the
// message declares no fields, only unknown fields can carry data; most
// overrides below are the minimal generated scaffolding.
21471     /**
21472      * Protobuf type {@code hbase.pb.UpdateConfigurationRequest}
21473      */
21474     public static final class Builder extends
21475         com.google.protobuf.GeneratedMessage.Builder<Builder>
21476        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequestOrBuilder {
21477       public static final com.google.protobuf.Descriptors.Descriptor
21478           getDescriptor() {
21479         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_descriptor;
21480       }
21481 
21482       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
21483           internalGetFieldAccessorTable() {
21484         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable
21485             .ensureFieldAccessorsInitialized(
21486                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.Builder.class);
21487       }
21488 
21489       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.newBuilder()
21490       private Builder() {
21491         maybeForceBuilderInitialization();
21492       }
21493 
21494       private Builder(
21495           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21496         super(parent);
21497         maybeForceBuilderInitialization();
21498       }
// No message/group fields exist, so there are no nested field builders to
// eagerly create; the alwaysUseFieldBuilders branch is intentionally empty.
21499       private void maybeForceBuilderInitialization() {
21500         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
21501         }
21502       }
21503       private static Builder create() {
21504         return new Builder();
21505       }
21506 
21507       public Builder clear() {
21508         super.clear();
21509         return this;
21510       }
21511 
21512       public Builder clone() {
21513         return create().mergeFrom(buildPartial());
21514       }
21515 
21516       public com.google.protobuf.Descriptors.Descriptor
21517           getDescriptorForType() {
21518         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_descriptor;
21519       }
21520 
21521       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest getDefaultInstanceForType() {
21522         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance();
21523       }
21524 
// build() differs from buildPartial() only in that it rejects uninitialized
// results; with no required fields that check always passes here.
21525       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest build() {
21526         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest result = buildPartial();
21527         if (!result.isInitialized()) {
21528           throw newUninitializedMessageException(result);
21529         }
21530         return result;
21531       }
21532 
21533       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest buildPartial() {
21534         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest(this);
21535         onBuilt();
21536         return result;
21537       }
21538 
// Dynamic dispatch: route to the typed overload when possible, otherwise fall
// back to the generic descriptor-driven merge in the superclass.
21539       public Builder mergeFrom(com.google.protobuf.Message other) {
21540         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest) {
21541           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)other);
21542         } else {
21543           super.mergeFrom(other);
21544           return this;
21545         }
21546       }
21547 
// Typed merge: only unknown fields can differ between two instances of this
// field-less message, so that is all that gets merged.
21548       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest other) {
21549         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance()) return this;
21550         this.mergeUnknownFields(other.getUnknownFields());
21551         return this;
21552       }
21553 
// No required fields declared, so every builder state is valid.
21554       public final boolean isInitialized() {
21555         return true;
21556       }
21557 
// Streaming merge: if parsing fails mid-message, the partially parsed data
// (recovered via getUnfinishedMessage) is still merged in before rethrowing.
21558       public Builder mergeFrom(
21559           com.google.protobuf.CodedInputStream input,
21560           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21561           throws java.io.IOException {
21562         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parsedMessage = null;
21563         try {
21564           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21565         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21566           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest) e.getUnfinishedMessage();
21567           throw e;
21568         } finally {
21569           if (parsedMessage != null) {
21570             mergeFrom(parsedMessage);
21571           }
21572         }
21573         return this;
21574       }
21575 
21576       // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateConfigurationRequest)
21577     }
21578 
// Eagerly creates the shared default (empty) instance; the boolean-arg
// constructor skips builder plumbing for this singleton.
21579     static {
21580       defaultInstance = new UpdateConfigurationRequest(true);
21581       defaultInstance.initFields();
21582     }
21583 
21584     // @@protoc_insertion_point(class_scope:hbase.pb.UpdateConfigurationRequest)
21585   }
21586 
// Read-only accessor interface for UpdateConfigurationResponse. The message
// declares no fields, so the interface adds nothing beyond MessageOrBuilder.
21587   public interface UpdateConfigurationResponseOrBuilder
21588       extends com.google.protobuf.MessageOrBuilder {
21589   }
// Empty (field-less) response message for the UpdateConfiguration RPC.
// Its only state is the unknown-field set, preserved for forward compatibility.
21590   /**
21591    * Protobuf type {@code hbase.pb.UpdateConfigurationResponse}
21592    */
21593   public static final class UpdateConfigurationResponse extends
21594       com.google.protobuf.GeneratedMessage
21595       implements UpdateConfigurationResponseOrBuilder {
21596     // Use UpdateConfigurationResponse.newBuilder() to construct.
21597     private UpdateConfigurationResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
21598       super(builder);
21599       this.unknownFields = builder.getUnknownFields();
21600     }
// Singleton constructor used only for the static defaultInstance below.
21601     private UpdateConfigurationResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
21602 
21603     private static final UpdateConfigurationResponse defaultInstance;
21604     public static UpdateConfigurationResponse getDefaultInstance() {
21605       return defaultInstance;
21606     }
21607 
21608     public UpdateConfigurationResponse getDefaultInstanceForType() {
21609       return defaultInstance;
21610     }
21611 
21612     private final com.google.protobuf.UnknownFieldSet unknownFields;
21613     @java.lang.Override
21614     public final com.google.protobuf.UnknownFieldSet
21615         getUnknownFields() {
21616       return this.unknownFields;
21617     }
// Parsing constructor: reads tags until EOF (tag 0). Every tag is unrecognized
// for this field-less message, so all data lands in unknownFields, which is
// frozen in the finally block even when parsing throws.
21618     private UpdateConfigurationResponse(
21619         com.google.protobuf.CodedInputStream input,
21620         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21621         throws com.google.protobuf.InvalidProtocolBufferException {
21622       initFields();
21623       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
21624           com.google.protobuf.UnknownFieldSet.newBuilder();
21625       try {
21626         boolean done = false;
21627         while (!done) {
21628           int tag = input.readTag();
21629           switch (tag) {
21630             case 0:
21631               done = true;
21632               break;
21633             default: {
21634               if (!parseUnknownField(input, unknownFields,
21635                                      extensionRegistry, tag)) {
21636                 done = true;
21637               }
21638               break;
21639             }
21640           }
21641         }
21642       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21643         throw e.setUnfinishedMessage(this);
21644       } catch (java.io.IOException e) {
21645         throw new com.google.protobuf.InvalidProtocolBufferException(
21646             e.getMessage()).setUnfinishedMessage(this);
21647       } finally {
21648         this.unknownFields = unknownFields.build();
21649         makeExtensionsImmutable();
21650       }
21651     }
21652     public static final com.google.protobuf.Descriptors.Descriptor
21653         getDescriptor() {
21654       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_descriptor;
21655     }
21656 
21657     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
21658         internalGetFieldAccessorTable() {
21659       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable
21660           .ensureFieldAccessorsInitialized(
21661               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.Builder.class);
21662     }
21663 
// Shared stateless parser; all static parseFrom helpers below delegate to it.
21664     public static com.google.protobuf.Parser<UpdateConfigurationResponse> PARSER =
21665         new com.google.protobuf.AbstractParser<UpdateConfigurationResponse>() {
21666       public UpdateConfigurationResponse parsePartialFrom(
21667           com.google.protobuf.CodedInputStream input,
21668           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21669           throws com.google.protobuf.InvalidProtocolBufferException {
21670         return new UpdateConfigurationResponse(input, extensionRegistry);
21671       }
21672     };
21673 
21674     @java.lang.Override
21675     public com.google.protobuf.Parser<UpdateConfigurationResponse> getParserForType() {
21676       return PARSER;
21677     }
21678 
// Nothing to reset: the message has no declared fields.
21679     private void initFields() {
21680     }
// Memoized tri-state: -1 unknown, 1 initialized, 0 not. With no required
// fields the answer is always true after the first call.
21681     private byte memoizedIsInitialized = -1;
21682     public final boolean isInitialized() {
21683       byte isInitialized = memoizedIsInitialized;
21684       if (isInitialized != -1) return isInitialized == 1;
21685 
21686       memoizedIsInitialized = 1;
21687       return true;
21688     }
21689 
// Serialization writes only the preserved unknown fields.
21690     public void writeTo(com.google.protobuf.CodedOutputStream output)
21691                         throws java.io.IOException {
21692       getSerializedSize();
21693       getUnknownFields().writeTo(output);
21694     }
21695 
21696     private int memoizedSerializedSize = -1;
21697     public int getSerializedSize() {
21698       int size = memoizedSerializedSize;
21699       if (size != -1) return size;
21700 
21701       size = 0;
21702       size += getUnknownFields().getSerializedSize();
21703       memoizedSerializedSize = size;
21704       return size;
21705     }
21706 
21707     private static final long serialVersionUID = 0L;
21708     @java.lang.Override
21709     protected java.lang.Object writeReplace()
21710         throws java.io.ObjectStreamException {
21711       return super.writeReplace();
21712     }
21713 
// Equality reduces to unknown-field equality, since there are no fields.
21714     @java.lang.Override
21715     public boolean equals(final java.lang.Object obj) {
21716       if (obj == this) {
21717        return true;
21718       }
21719       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse)) {
21720         return super.equals(obj);
21721       }
21722       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) obj;
21723 
21724       boolean result = true;
21725       result = result &&
21726           getUnknownFields().equals(other.getUnknownFields());
21727       return result;
21728     }
21729 
// Hash combines the descriptor identity and unknown fields; memoized (0 is
// treated as "not yet computed").
21730     private int memoizedHashCode = 0;
21731     @java.lang.Override
21732     public int hashCode() {
21733       if (memoizedHashCode != 0) {
21734         return memoizedHashCode;
21735       }
21736       int hash = 41;
21737       hash = (19 * hash) + getDescriptorForType().hashCode();
21738       hash = (29 * hash) + getUnknownFields().hashCode();
21739       memoizedHashCode = hash;
21740       return hash;
21741     }
21742 
// Standard generated parse entry points; all delegate to PARSER.
21743     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
21744         com.google.protobuf.ByteString data)
21745         throws com.google.protobuf.InvalidProtocolBufferException {
21746       return PARSER.parseFrom(data);
21747     }
21748     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
21749         com.google.protobuf.ByteString data,
21750         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21751         throws com.google.protobuf.InvalidProtocolBufferException {
21752       return PARSER.parseFrom(data, extensionRegistry);
21753     }
21754     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(byte[] data)
21755         throws com.google.protobuf.InvalidProtocolBufferException {
21756       return PARSER.parseFrom(data);
21757     }
21758     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
21759         byte[] data,
21760         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21761         throws com.google.protobuf.InvalidProtocolBufferException {
21762       return PARSER.parseFrom(data, extensionRegistry);
21763     }
21764     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(java.io.InputStream input)
21765         throws java.io.IOException {
21766       return PARSER.parseFrom(input);
21767     }
21768     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
21769         java.io.InputStream input,
21770         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21771         throws java.io.IOException {
21772       return PARSER.parseFrom(input, extensionRegistry);
21773     }
// Delimited variants read a varint length prefix before the message body.
21774     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseDelimitedFrom(java.io.InputStream input)
21775         throws java.io.IOException {
21776       return PARSER.parseDelimitedFrom(input);
21777     }
21778     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseDelimitedFrom(
21779         java.io.InputStream input,
21780         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21781         throws java.io.IOException {
21782       return PARSER.parseDelimitedFrom(input, extensionRegistry);
21783     }
21784     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
21785         com.google.protobuf.CodedInputStream input)
21786         throws java.io.IOException {
21787       return PARSER.parseFrom(input);
21788     }
21789     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
21790         com.google.protobuf.CodedInputStream input,
21791         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21792         throws java.io.IOException {
21793       return PARSER.parseFrom(input, extensionRegistry);
21794     }
21795 
// Builder factories mirroring the Request class above.
21796     public static Builder newBuilder() { return Builder.create(); }
21797     public Builder newBuilderForType() { return newBuilder(); }
21798     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse prototype) {
21799       return newBuilder().mergeFrom(prototype);
21800     }
21801     public Builder toBuilder() { return newBuilder(this); }
21802 
21803     @java.lang.Override
21804     protected Builder newBuilderForType(
21805         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21806       Builder builder = new Builder(parent);
21807       return builder;
21808     }
// Builder for the field-less response; only unknown fields can carry data.
21809     /**
21810      * Protobuf type {@code hbase.pb.UpdateConfigurationResponse}
21811      */
21812     public static final class Builder extends
21813         com.google.protobuf.GeneratedMessage.Builder<Builder>
21814        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponseOrBuilder {
21815       public static final com.google.protobuf.Descriptors.Descriptor
21816           getDescriptor() {
21817         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_descriptor;
21818       }
21819 
21820       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
21821           internalGetFieldAccessorTable() {
21822         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable
21823             .ensureFieldAccessorsInitialized(
21824                 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.Builder.class);
21825       }
21826 
21827       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.newBuilder()
21828       private Builder() {
21829         maybeForceBuilderInitialization();
21830       }
21831 
21832       private Builder(
21833           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21834         super(parent);
21835         maybeForceBuilderInitialization();
21836       }
// No message/group fields, so there are no nested builders to initialize.
21837       private void maybeForceBuilderInitialization() {
21838         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
21839         }
21840       }
21841       private static Builder create() {
21842         return new Builder();
21843       }
21844 
21845       public Builder clear() {
21846         super.clear();
21847         return this;
21848       }
21849 
21850       public Builder clone() {
21851         return create().mergeFrom(buildPartial());
21852       }
21853 
21854       public com.google.protobuf.Descriptors.Descriptor
21855           getDescriptorForType() {
21856         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_descriptor;
21857       }
21858 
21859       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse getDefaultInstanceForType() {
21860         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance();
21861       }
21862 
21863       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse build() {
21864         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse result = buildPartial();
21865         if (!result.isInitialized()) {
21866           throw newUninitializedMessageException(result);
21867         }
21868         return result;
21869       }
21870 
21871       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse buildPartial() {
21872         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse(this);
21873         onBuilt();
21874         return result;
21875       }
21876 
// Route to the typed merge when possible; otherwise use the generic one.
21877       public Builder mergeFrom(com.google.protobuf.Message other) {
21878         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) {
21879           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse)other);
21880         } else {
21881           super.mergeFrom(other);
21882           return this;
21883         }
21884       }
21885 
21886       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse other) {
21887         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance()) return this;
21888         this.mergeUnknownFields(other.getUnknownFields());
21889         return this;
21890       }
21891 
// No required fields, so the builder is always in a valid state.
21892       public final boolean isInitialized() {
21893         return true;
21894       }
21895 
// Streaming merge: partial data recovered from a parse failure is still merged
// before the exception propagates.
21896       public Builder mergeFrom(
21897           com.google.protobuf.CodedInputStream input,
21898           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21899           throws java.io.IOException {
21900         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parsedMessage = null;
21901         try {
21902           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21903         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21904           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) e.getUnfinishedMessage();
21905           throw e;
21906         } finally {
21907           if (parsedMessage != null) {
21908             mergeFrom(parsedMessage);
21909           }
21910         }
21911         return this;
21912       }
21913 
21914       // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateConfigurationResponse)
21915     }
21916 
// Eagerly creates the shared default (empty) instance for this message type.
21917     static {
21918       defaultInstance = new UpdateConfigurationResponse(true);
21919       defaultInstance.initFields();
21920     }
21921 
21922     // @@protoc_insertion_point(class_scope:hbase.pb.UpdateConfigurationResponse)
21923   }
21924 
21925   /**
21926    * Protobuf service {@code hbase.pb.AdminService}
21927    */
21928   public static abstract class AdminService
21929       implements com.google.protobuf.Service {
21930     protected AdminService() {}
21931 
// Asynchronous, callback-style contract mirroring every RPC of the
// hbase.pb.AdminService service. Each method takes an RpcController for
// error signaling, the request message, and an RpcCallback invoked with the
// response. Implementations are wrapped via newReflectiveService(...).
21932     public interface Interface {
21933       /**
21934        * <code>rpc GetRegionInfo(.hbase.pb.GetRegionInfoRequest) returns (.hbase.pb.GetRegionInfoResponse);</code>
21935        */
21936       public abstract void getRegionInfo(
21937           com.google.protobuf.RpcController controller,
21938           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
21939           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done);
21940 
21941       /**
21942        * <code>rpc GetStoreFile(.hbase.pb.GetStoreFileRequest) returns (.hbase.pb.GetStoreFileResponse);</code>
21943        */
21944       public abstract void getStoreFile(
21945           com.google.protobuf.RpcController controller,
21946           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
21947           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done);
21948 
21949       /**
21950        * <code>rpc GetOnlineRegion(.hbase.pb.GetOnlineRegionRequest) returns (.hbase.pb.GetOnlineRegionResponse);</code>
21951        */
21952       public abstract void getOnlineRegion(
21953           com.google.protobuf.RpcController controller,
21954           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
21955           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done);
21956 
21957       /**
21958        * <code>rpc OpenRegion(.hbase.pb.OpenRegionRequest) returns (.hbase.pb.OpenRegionResponse);</code>
21959        */
21960       public abstract void openRegion(
21961           com.google.protobuf.RpcController controller,
21962           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
21963           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done);
21964 
21965       /**
21966        * <code>rpc WarmupRegion(.hbase.pb.WarmupRegionRequest) returns (.hbase.pb.WarmupRegionResponse);</code>
21967        */
21968       public abstract void warmupRegion(
21969           com.google.protobuf.RpcController controller,
21970           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest request,
21971           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse> done);
21972 
21973       /**
21974        * <code>rpc CloseRegion(.hbase.pb.CloseRegionRequest) returns (.hbase.pb.CloseRegionResponse);</code>
21975        */
21976       public abstract void closeRegion(
21977           com.google.protobuf.RpcController controller,
21978           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
21979           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done);
21980 
21981       /**
21982        * <code>rpc FlushRegion(.hbase.pb.FlushRegionRequest) returns (.hbase.pb.FlushRegionResponse);</code>
21983        */
21984       public abstract void flushRegion(
21985           com.google.protobuf.RpcController controller,
21986           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
21987           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done);
21988 
21989       /**
21990        * <code>rpc SplitRegion(.hbase.pb.SplitRegionRequest) returns (.hbase.pb.SplitRegionResponse);</code>
21991        */
21992       public abstract void splitRegion(
21993           com.google.protobuf.RpcController controller,
21994           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
21995           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done);
21996 
21997       /**
21998        * <code>rpc CompactRegion(.hbase.pb.CompactRegionRequest) returns (.hbase.pb.CompactRegionResponse);</code>
21999        */
22000       public abstract void compactRegion(
22001           com.google.protobuf.RpcController controller,
22002           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
22003           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done);
22004 
22005       /**
22006        * <code>rpc MergeRegions(.hbase.pb.MergeRegionsRequest) returns (.hbase.pb.MergeRegionsResponse);</code>
22007        */
22008       public abstract void mergeRegions(
22009           com.google.protobuf.RpcController controller,
22010           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request,
22011           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse> done);
22012 
22013       /**
22014        * <code>rpc ReplicateWALEntry(.hbase.pb.ReplicateWALEntryRequest) returns (.hbase.pb.ReplicateWALEntryResponse);</code>
22015        */
22016       public abstract void replicateWALEntry(
22017           com.google.protobuf.RpcController controller,
22018           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22019           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done);
22020 
// Note: Replay reuses the ReplicateWALEntry request/response message pair.
22021       /**
22022        * <code>rpc Replay(.hbase.pb.ReplicateWALEntryRequest) returns (.hbase.pb.ReplicateWALEntryResponse);</code>
22023        */
22024       public abstract void replay(
22025           com.google.protobuf.RpcController controller,
22026           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22027           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done);
22028 
22029       /**
22030        * <code>rpc RollWALWriter(.hbase.pb.RollWALWriterRequest) returns (.hbase.pb.RollWALWriterResponse);</code>
22031        */
22032       public abstract void rollWALWriter(
22033           com.google.protobuf.RpcController controller,
22034           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
22035           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done);
22036 
22037       /**
22038        * <code>rpc GetServerInfo(.hbase.pb.GetServerInfoRequest) returns (.hbase.pb.GetServerInfoResponse);</code>
22039        */
22040       public abstract void getServerInfo(
22041           com.google.protobuf.RpcController controller,
22042           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
22043           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done);
22044 
22045       /**
22046        * <code>rpc StopServer(.hbase.pb.StopServerRequest) returns (.hbase.pb.StopServerResponse);</code>
22047        */
22048       public abstract void stopServer(
22049           com.google.protobuf.RpcController controller,
22050           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
22051           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done);
22052 
22053       /**
22054        * <code>rpc UpdateFavoredNodes(.hbase.pb.UpdateFavoredNodesRequest) returns (.hbase.pb.UpdateFavoredNodesResponse);</code>
22055        */
22056       public abstract void updateFavoredNodes(
22057           com.google.protobuf.RpcController controller,
22058           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request,
22059           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse> done);
22060 
22061       /**
22062        * <code>rpc UpdateConfiguration(.hbase.pb.UpdateConfigurationRequest) returns (.hbase.pb.UpdateConfigurationResponse);</code>
22063        */
22064       public abstract void updateConfiguration(
22065           com.google.protobuf.RpcController controller,
22066           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
22067           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done);
22068 
22069     }
22070 
// NOTE(review): protoc-generated — DO NOT hand-edit; regenerate from Admin.proto.
// Adapts a plain Interface implementation into a com.google.protobuf.Service by
// returning an anonymous AdminService whose every RPC override delegates verbatim
// (controller, request, done) to the supplied impl.
22071     public static com.google.protobuf.Service newReflectiveService(
22072         final Interface impl) {
22073       return new AdminService() {
22074         @java.lang.Override
22075         public  void getRegionInfo(
22076             com.google.protobuf.RpcController controller,
22077             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
22078             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) {
22079           impl.getRegionInfo(controller, request, done);
22080         }
22081 
22082         @java.lang.Override
22083         public  void getStoreFile(
22084             com.google.protobuf.RpcController controller,
22085             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
22086             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done) {
22087           impl.getStoreFile(controller, request, done);
22088         }
22089 
22090         @java.lang.Override
22091         public  void getOnlineRegion(
22092             com.google.protobuf.RpcController controller,
22093             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
22094             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done) {
22095           impl.getOnlineRegion(controller, request, done);
22096         }
22097 
22098         @java.lang.Override
22099         public  void openRegion(
22100             com.google.protobuf.RpcController controller,
22101             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
22102             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done) {
22103           impl.openRegion(controller, request, done);
22104         }
22105 
22106         @java.lang.Override
22107         public  void warmupRegion(
22108             com.google.protobuf.RpcController controller,
22109             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest request,
22110             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse> done) {
22111           impl.warmupRegion(controller, request, done);
22112         }
22113 
22114         @java.lang.Override
22115         public  void closeRegion(
22116             com.google.protobuf.RpcController controller,
22117             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
22118             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done) {
22119           impl.closeRegion(controller, request, done);
22120         }
22121 
22122         @java.lang.Override
22123         public  void flushRegion(
22124             com.google.protobuf.RpcController controller,
22125             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
22126             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done) {
22127           impl.flushRegion(controller, request, done);
22128         }
22129 
22130         @java.lang.Override
22131         public  void splitRegion(
22132             com.google.protobuf.RpcController controller,
22133             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
22134             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done) {
22135           impl.splitRegion(controller, request, done);
22136         }
22137 
22138         @java.lang.Override
22139         public  void compactRegion(
22140             com.google.protobuf.RpcController controller,
22141             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
22142             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done) {
22143           impl.compactRegion(controller, request, done);
22144         }
22145 
22146         @java.lang.Override
22147         public  void mergeRegions(
22148             com.google.protobuf.RpcController controller,
22149             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request,
22150             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse> done) {
22151           impl.mergeRegions(controller, request, done);
22152         }
22153 
22154         @java.lang.Override
22155         public  void replicateWALEntry(
22156             com.google.protobuf.RpcController controller,
22157             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22158             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) {
22159           impl.replicateWALEntry(controller, request, done);
22160         }
22161 
22162         @java.lang.Override
22163         public  void replay(
22164             com.google.protobuf.RpcController controller,
22165             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22166             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) {
22167           impl.replay(controller, request, done);
22168         }
22169 
22170         @java.lang.Override
22171         public  void rollWALWriter(
22172             com.google.protobuf.RpcController controller,
22173             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
22174             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done) {
22175           impl.rollWALWriter(controller, request, done);
22176         }
22177 
22178         @java.lang.Override
22179         public  void getServerInfo(
22180             com.google.protobuf.RpcController controller,
22181             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
22182             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done) {
22183           impl.getServerInfo(controller, request, done);
22184         }
22185 
22186         @java.lang.Override
22187         public  void stopServer(
22188             com.google.protobuf.RpcController controller,
22189             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
22190             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done) {
22191           impl.stopServer(controller, request, done);
22192         }
22193 
22194         @java.lang.Override
22195         public  void updateFavoredNodes(
22196             com.google.protobuf.RpcController controller,
22197             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request,
22198             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse> done) {
22199           impl.updateFavoredNodes(controller, request, done);
22200         }
22201 
22202         @java.lang.Override
22203         public  void updateConfiguration(
22204             com.google.protobuf.RpcController controller,
22205             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
22206             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done) {
22207           impl.updateConfiguration(controller, request, done);
22208         }
22209 
22210       };
22211     }
22212 
// NOTE(review): protoc-generated — DO NOT hand-edit; regenerate from Admin.proto.
// Adapts a BlockingInterface implementation into a protobuf BlockingService.
// Dispatch is by method.getIndex(): case numbers 0..16 mirror the declaration
// order of the rpcs in the .proto, so this switch must stay in lock-step with
// the descriptor. Cases 10 (ReplicateWALEntry) and 11 (Replay) intentionally
// share ReplicateWALEntryRequest/Response message types.
22213     public static com.google.protobuf.BlockingService
22214         newReflectiveBlockingService(final BlockingInterface impl) {
22215       return new com.google.protobuf.BlockingService() {
22216         public final com.google.protobuf.Descriptors.ServiceDescriptor
22217             getDescriptorForType() {
22218           return getDescriptor();
22219         }
22220 
22221         public final com.google.protobuf.Message callBlockingMethod(
22222             com.google.protobuf.Descriptors.MethodDescriptor method,
22223             com.google.protobuf.RpcController controller,
22224             com.google.protobuf.Message request)
22225             throws com.google.protobuf.ServiceException {
22226           if (method.getService() != getDescriptor()) {
22227             throw new java.lang.IllegalArgumentException(
22228               "Service.callBlockingMethod() given method descriptor for " +
22229               "wrong service type.");
22230           }
22231           switch(method.getIndex()) {
22232             case 0:
22233               return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request);
22234             case 1:
22235               return impl.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request);
22236             case 2:
22237               return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request);
22238             case 3:
22239               return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request);
22240             case 4:
22241               return impl.warmupRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest)request);
22242             case 5:
22243               return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request);
22244             case 6:
22245               return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request);
22246             case 7:
22247               return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request);
22248             case 8:
22249               return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request);
22250             case 9:
22251               return impl.mergeRegions(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest)request);
22252             case 10:
22253               return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request);
22254             case 11:
22255               return impl.replay(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request);
22256             case 12:
22257               return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request);
22258             case 13:
22259               return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request);
22260             case 14:
22261               return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request);
22262             case 15:
22263               return impl.updateFavoredNodes(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest)request);
22264             case 16:
22265               return impl.updateConfiguration(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)request);
22266             default:
22267               throw new java.lang.AssertionError("Can't get here.");
22268           }
22269         }
22270 
22271         public final com.google.protobuf.Message
22272             getRequestPrototype(
22273             com.google.protobuf.Descriptors.MethodDescriptor method) {
22274           if (method.getService() != getDescriptor()) {
22275             throw new java.lang.IllegalArgumentException(
22276               "Service.getRequestPrototype() given method " +
22277               "descriptor for wrong service type.");
22278           }
22279           switch(method.getIndex()) {
22280             case 0:
22281               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
22282             case 1:
22283               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
22284             case 2:
22285               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
22286             case 3:
22287               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
22288             case 4:
22289               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.getDefaultInstance();
22290             case 5:
22291               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
22292             case 6:
22293               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
22294             case 7:
22295               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
22296             case 8:
22297               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
22298             case 9:
22299               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDefaultInstance();
22300             case 10:
22301               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
22302             case 11:
22303               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
22304             case 12:
22305               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
22306             case 13:
22307               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
22308             case 14:
22309               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
22310             case 15:
22311               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.getDefaultInstance();
22312             case 16:
22313               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance();
22314             default:
22315               throw new java.lang.AssertionError("Can't get here.");
22316           }
22317         }
22318 
22319         public final com.google.protobuf.Message
22320             getResponsePrototype(
22321             com.google.protobuf.Descriptors.MethodDescriptor method) {
22322           if (method.getService() != getDescriptor()) {
22323             throw new java.lang.IllegalArgumentException(
22324               "Service.getResponsePrototype() given method " +
22325               "descriptor for wrong service type.");
22326           }
22327           switch(method.getIndex()) {
22328             case 0:
22329               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
22330             case 1:
22331               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
22332             case 2:
22333               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
22334             case 3:
22335               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
22336             case 4:
22337               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance();
22338             case 5:
22339               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
22340             case 6:
22341               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
22342             case 7:
22343               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
22344             case 8:
22345               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
22346             case 9:
22347               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance();
22348             case 10:
22349               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
22350             case 11:
22351               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
22352             case 12:
22353               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
22354             case 13:
22355               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
22356             case 14:
22357               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
22358             case 15:
22359               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance();
22360             case 16:
22361               return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance();
22362             default:
22363               throw new java.lang.AssertionError("Can't get here.");
22364           }
22365         }
22366 
22367       };
22368     }
22369 
// NOTE(review): protoc-generated abstract RPC methods of the service class — one per
// rpc declared in Admin.proto, in declaration order (this order defines the method
// indices used by the reflective dispatch switches). DO NOT hand-edit; regenerate.
// Note that Replay reuses ReplicateWALEntryRequest/Response rather than its own types.
22370     /**
22371      * <code>rpc GetRegionInfo(.hbase.pb.GetRegionInfoRequest) returns (.hbase.pb.GetRegionInfoResponse);</code>
22372      */
22373     public abstract void getRegionInfo(
22374         com.google.protobuf.RpcController controller,
22375         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
22376         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done);
22377 
22378     /**
22379      * <code>rpc GetStoreFile(.hbase.pb.GetStoreFileRequest) returns (.hbase.pb.GetStoreFileResponse);</code>
22380      */
22381     public abstract void getStoreFile(
22382         com.google.protobuf.RpcController controller,
22383         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
22384         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done);
22385 
22386     /**
22387      * <code>rpc GetOnlineRegion(.hbase.pb.GetOnlineRegionRequest) returns (.hbase.pb.GetOnlineRegionResponse);</code>
22388      */
22389     public abstract void getOnlineRegion(
22390         com.google.protobuf.RpcController controller,
22391         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
22392         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done);
22393 
22394     /**
22395      * <code>rpc OpenRegion(.hbase.pb.OpenRegionRequest) returns (.hbase.pb.OpenRegionResponse);</code>
22396      */
22397     public abstract void openRegion(
22398         com.google.protobuf.RpcController controller,
22399         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
22400         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done);
22401 
22402     /**
22403      * <code>rpc WarmupRegion(.hbase.pb.WarmupRegionRequest) returns (.hbase.pb.WarmupRegionResponse);</code>
22404      */
22405     public abstract void warmupRegion(
22406         com.google.protobuf.RpcController controller,
22407         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest request,
22408         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse> done);
22409 
22410     /**
22411      * <code>rpc CloseRegion(.hbase.pb.CloseRegionRequest) returns (.hbase.pb.CloseRegionResponse);</code>
22412      */
22413     public abstract void closeRegion(
22414         com.google.protobuf.RpcController controller,
22415         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
22416         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done);
22417 
22418     /**
22419      * <code>rpc FlushRegion(.hbase.pb.FlushRegionRequest) returns (.hbase.pb.FlushRegionResponse);</code>
22420      */
22421     public abstract void flushRegion(
22422         com.google.protobuf.RpcController controller,
22423         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
22424         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done);
22425 
22426     /**
22427      * <code>rpc SplitRegion(.hbase.pb.SplitRegionRequest) returns (.hbase.pb.SplitRegionResponse);</code>
22428      */
22429     public abstract void splitRegion(
22430         com.google.protobuf.RpcController controller,
22431         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
22432         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done);
22433 
22434     /**
22435      * <code>rpc CompactRegion(.hbase.pb.CompactRegionRequest) returns (.hbase.pb.CompactRegionResponse);</code>
22436      */
22437     public abstract void compactRegion(
22438         com.google.protobuf.RpcController controller,
22439         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
22440         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done);
22441 
22442     /**
22443      * <code>rpc MergeRegions(.hbase.pb.MergeRegionsRequest) returns (.hbase.pb.MergeRegionsResponse);</code>
22444      */
22445     public abstract void mergeRegions(
22446         com.google.protobuf.RpcController controller,
22447         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request,
22448         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse> done);
22449 
22450     /**
22451      * <code>rpc ReplicateWALEntry(.hbase.pb.ReplicateWALEntryRequest) returns (.hbase.pb.ReplicateWALEntryResponse);</code>
22452      */
22453     public abstract void replicateWALEntry(
22454         com.google.protobuf.RpcController controller,
22455         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22456         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done);
22457 
22458     /**
22459      * <code>rpc Replay(.hbase.pb.ReplicateWALEntryRequest) returns (.hbase.pb.ReplicateWALEntryResponse);</code>
22460      */
22461     public abstract void replay(
22462         com.google.protobuf.RpcController controller,
22463         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22464         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done);
22465 
22466     /**
22467      * <code>rpc RollWALWriter(.hbase.pb.RollWALWriterRequest) returns (.hbase.pb.RollWALWriterResponse);</code>
22468      */
22469     public abstract void rollWALWriter(
22470         com.google.protobuf.RpcController controller,
22471         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
22472         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done);
22473 
22474     /**
22475      * <code>rpc GetServerInfo(.hbase.pb.GetServerInfoRequest) returns (.hbase.pb.GetServerInfoResponse);</code>
22476      */
22477     public abstract void getServerInfo(
22478         com.google.protobuf.RpcController controller,
22479         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
22480         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done);
22481 
22482     /**
22483      * <code>rpc StopServer(.hbase.pb.StopServerRequest) returns (.hbase.pb.StopServerResponse);</code>
22484      */
22485     public abstract void stopServer(
22486         com.google.protobuf.RpcController controller,
22487         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
22488         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done);
22489 
22490     /**
22491      * <code>rpc UpdateFavoredNodes(.hbase.pb.UpdateFavoredNodesRequest) returns (.hbase.pb.UpdateFavoredNodesResponse);</code>
22492      */
22493     public abstract void updateFavoredNodes(
22494         com.google.protobuf.RpcController controller,
22495         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request,
22496         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse> done);
22497 
22498     /**
22499      * <code>rpc UpdateConfiguration(.hbase.pb.UpdateConfigurationRequest) returns (.hbase.pb.UpdateConfigurationResponse);</code>
22500      */
22501     public abstract void updateConfiguration(
22502         com.google.protobuf.RpcController controller,
22503         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
22504         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done);
22505 
// Returns the protobuf ServiceDescriptor for this service; index 0 because it is
// the first service declared in Admin.proto's file descriptor (generated code).
22506     public static final
22507         com.google.protobuf.Descriptors.ServiceDescriptor
22508         getDescriptor() {
22509       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.getDescriptor().getServices().get(0);
22510     }
// Instance-level descriptor accessor required by com.google.protobuf.Service;
// simply defers to the static getDescriptor() (generated code).
22511     public final com.google.protobuf.Descriptors.ServiceDescriptor
22512         getDescriptorForType() {
22513       return getDescriptor();
22514     }
22515 
22516     public final void callMethod(
22517         com.google.protobuf.Descriptors.MethodDescriptor method,
22518         com.google.protobuf.RpcController controller,
22519         com.google.protobuf.Message request,
22520         com.google.protobuf.RpcCallback<
22521           com.google.protobuf.Message> done) {
22522       if (method.getService() != getDescriptor()) {
22523         throw new java.lang.IllegalArgumentException(
22524           "Service.callMethod() given method descriptor for wrong " +
22525           "service type.");
22526       }
22527       switch(method.getIndex()) {
22528         case 0:
22529           this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request,
22530             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse>specializeCallback(
22531               done));
22532           return;
22533         case 1:
22534           this.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request,
22535             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse>specializeCallback(
22536               done));
22537           return;
22538         case 2:
22539           this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request,
22540             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse>specializeCallback(
22541               done));
22542           return;
22543         case 3:
22544           this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request,
22545             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse>specializeCallback(
22546               done));
22547           return;
22548         case 4:
22549           this.warmupRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest)request,
22550             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse>specializeCallback(
22551               done));
22552           return;
22553         case 5:
22554           this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request,
22555             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse>specializeCallback(
22556               done));
22557           return;
22558         case 6:
22559           this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request,
22560             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse>specializeCallback(
22561               done));
22562           return;
22563         case 7:
22564           this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request,
22565             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse>specializeCallback(
22566               done));
22567           return;
22568         case 8:
22569           this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request,
22570             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse>specializeCallback(
22571               done));
22572           return;
22573         case 9:
22574           this.mergeRegions(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest)request,
22575             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse>specializeCallback(
22576               done));
22577           return;
22578         case 10:
22579           this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request,
22580             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse>specializeCallback(
22581               done));
22582           return;
22583         case 11:
22584           this.replay(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request,
22585             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse>specializeCallback(
22586               done));
22587           return;
22588         case 12:
22589           this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request,
22590             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse>specializeCallback(
22591               done));
22592           return;
22593         case 13:
22594           this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request,
22595             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse>specializeCallback(
22596               done));
22597           return;
22598         case 14:
22599           this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request,
22600             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse>specializeCallback(
22601               done));
22602           return;
22603         case 15:
22604           this.updateFavoredNodes(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest)request,
22605             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse>specializeCallback(
22606               done));
22607           return;
22608         case 16:
22609           this.updateConfiguration(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)request,
22610             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse>specializeCallback(
22611               done));
22612           return;
22613         default:
22614           throw new java.lang.AssertionError("Can't get here.");
22615       }
22616     }
22617 
    // Returns the prototype (default instance) of the request message for the
    // given RPC method, keyed by the method's index (0..16) in the AdminService
    // descriptor. Generated by protoc; the case order mirrors Admin.proto and
    // must not be edited by hand.
22618     public final com.google.protobuf.Message
22619         getRequestPrototype(
22620         com.google.protobuf.Descriptors.MethodDescriptor method) {
22621       if (method.getService() != getDescriptor()) {
22622         throw new java.lang.IllegalArgumentException(
22623           "Service.getRequestPrototype() given method " +
22624           "descriptor for wrong service type.");
22625       }
22626       switch(method.getIndex()) {
22627         case 0:
22628           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
22629         case 1:
22630           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
22631         case 2:
22632           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
22633         case 3:
22634           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
22635         case 4:
22636           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.getDefaultInstance();
22637         case 5:
22638           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
22639         case 6:
22640           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
22641         case 7:
22642           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
22643         case 8:
22644           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
22645         case 9:
22646           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDefaultInstance();
        // Indices 10 (replicateWALEntry) and 11 (replay) intentionally share
        // the same request type, ReplicateWALEntryRequest.
22647         case 10:
22648           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
22649         case 11:
22650           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
22651         case 12:
22652           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
22653         case 13:
22654           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
22655         case 14:
22656           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
22657         case 15:
22658           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.getDefaultInstance();
22659         case 16:
22660           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance();
22661         default:
          // Unreachable for a well-formed descriptor: every method index of
          // this service is handled above.
22662           throw new java.lang.AssertionError("Can't get here.");
22663       }
22664     }
22665 
    // Returns the prototype (default instance) of the response message for the
    // given RPC method, keyed by the method's index (0..16) in the AdminService
    // descriptor. Generated by protoc; mirrors getRequestPrototype and the
    // method order declared in Admin.proto.
22666     public final com.google.protobuf.Message
22667         getResponsePrototype(
22668         com.google.protobuf.Descriptors.MethodDescriptor method) {
22669       if (method.getService() != getDescriptor()) {
22670         throw new java.lang.IllegalArgumentException(
22671           "Service.getResponsePrototype() given method " +
22672           "descriptor for wrong service type.");
22673       }
22674       switch(method.getIndex()) {
22675         case 0:
22676           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
22677         case 1:
22678           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
22679         case 2:
22680           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
22681         case 3:
22682           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
22683         case 4:
22684           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance();
22685         case 5:
22686           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
22687         case 6:
22688           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
22689         case 7:
22690           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
22691         case 8:
22692           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
22693         case 9:
22694           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance();
        // Indices 10 (replicateWALEntry) and 11 (replay) intentionally share
        // the same response type, ReplicateWALEntryResponse.
22695         case 10:
22696           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
22697         case 11:
22698           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
22699         case 12:
22700           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
22701         case 13:
22702           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
22703         case 14:
22704           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
22705         case 15:
22706           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance();
22707         case 16:
22708           return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance();
22709         default:
          // Unreachable for a well-formed descriptor: every method index of
          // this service is handled above.
22710           throw new java.lang.AssertionError("Can't get here.");
22711       }
22712     }
22713 
    // Factory for the asynchronous (callback-based) client stub; all calls made
    // through the returned Stub are dispatched over the given RpcChannel.
22714     public static Stub newStub(
22715         com.google.protobuf.RpcChannel channel) {
22716       return new Stub(channel);
22717     }
22718 
    // Asynchronous client-side stub for AdminService. Every method below
    // follows the same protoc-generated pattern: it forwards the request over
    // the RpcChannel via channel.callMethod, identifying the RPC by its fixed
    // index in getDescriptor().getMethods(), and adapts the typed callback
    // 'done' to a generic Message callback with RpcUtil.generalizeCallback.
    // Generated code — do not edit by hand.
22719     public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface {
22720       private Stub(com.google.protobuf.RpcChannel channel) {
22721         this.channel = channel;
22722       }
22723
      // Transport used for all RPCs issued by this stub.
22724       private final com.google.protobuf.RpcChannel channel;
22725
22726       public com.google.protobuf.RpcChannel getChannel() {
22727         return channel;
22728       }
22729
      // Method index 0.
22730       public  void getRegionInfo(
22731           com.google.protobuf.RpcController controller,
22732           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
22733           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) {
22734         channel.callMethod(
22735           getDescriptor().getMethods().get(0),
22736           controller,
22737           request,
22738           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(),
22739           com.google.protobuf.RpcUtil.generalizeCallback(
22740             done,
22741             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class,
22742             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()));
22743       }
22744
      // Method index 1.
22745       public  void getStoreFile(
22746           com.google.protobuf.RpcController controller,
22747           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
22748           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done) {
22749         channel.callMethod(
22750           getDescriptor().getMethods().get(1),
22751           controller,
22752           request,
22753           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(),
22754           com.google.protobuf.RpcUtil.generalizeCallback(
22755             done,
22756             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class,
22757             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()));
22758       }
22759
      // Method index 2.
22760       public  void getOnlineRegion(
22761           com.google.protobuf.RpcController controller,
22762           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
22763           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done) {
22764         channel.callMethod(
22765           getDescriptor().getMethods().get(2),
22766           controller,
22767           request,
22768           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(),
22769           com.google.protobuf.RpcUtil.generalizeCallback(
22770             done,
22771             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class,
22772             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()));
22773       }
22774
      // Method index 3.
22775       public  void openRegion(
22776           com.google.protobuf.RpcController controller,
22777           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
22778           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done) {
22779         channel.callMethod(
22780           getDescriptor().getMethods().get(3),
22781           controller,
22782           request,
22783           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(),
22784           com.google.protobuf.RpcUtil.generalizeCallback(
22785             done,
22786             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class,
22787             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()));
22788       }
22789
      // Method index 4.
22790       public  void warmupRegion(
22791           com.google.protobuf.RpcController controller,
22792           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest request,
22793           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse> done) {
22794         channel.callMethod(
22795           getDescriptor().getMethods().get(4),
22796           controller,
22797           request,
22798           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance(),
22799           com.google.protobuf.RpcUtil.generalizeCallback(
22800             done,
22801             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.class,
22802             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance()));
22803       }
22804
      // Method index 5.
22805       public  void closeRegion(
22806           com.google.protobuf.RpcController controller,
22807           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
22808           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done) {
22809         channel.callMethod(
22810           getDescriptor().getMethods().get(5),
22811           controller,
22812           request,
22813           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(),
22814           com.google.protobuf.RpcUtil.generalizeCallback(
22815             done,
22816             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class,
22817             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()));
22818       }
22819
      // Method index 6.
22820       public  void flushRegion(
22821           com.google.protobuf.RpcController controller,
22822           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
22823           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done) {
22824         channel.callMethod(
22825           getDescriptor().getMethods().get(6),
22826           controller,
22827           request,
22828           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(),
22829           com.google.protobuf.RpcUtil.generalizeCallback(
22830             done,
22831             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class,
22832             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()));
22833       }
22834
      // Method index 7.
22835       public  void splitRegion(
22836           com.google.protobuf.RpcController controller,
22837           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
22838           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done) {
22839         channel.callMethod(
22840           getDescriptor().getMethods().get(7),
22841           controller,
22842           request,
22843           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(),
22844           com.google.protobuf.RpcUtil.generalizeCallback(
22845             done,
22846             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class,
22847             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()));
22848       }
22849
      // Method index 8.
22850       public  void compactRegion(
22851           com.google.protobuf.RpcController controller,
22852           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
22853           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done) {
22854         channel.callMethod(
22855           getDescriptor().getMethods().get(8),
22856           controller,
22857           request,
22858           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(),
22859           com.google.protobuf.RpcUtil.generalizeCallback(
22860             done,
22861             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class,
22862             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()));
22863       }
22864
      // Method index 9.
22865       public  void mergeRegions(
22866           com.google.protobuf.RpcController controller,
22867           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request,
22868           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse> done) {
22869         channel.callMethod(
22870           getDescriptor().getMethods().get(9),
22871           controller,
22872           request,
22873           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance(),
22874           com.google.protobuf.RpcUtil.generalizeCallback(
22875             done,
22876             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class,
22877             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance()));
22878       }
22879
      // Method index 10. Note: replay (index 11) uses the same request and
      // response types as replicateWALEntry.
22880       public  void replicateWALEntry(
22881           com.google.protobuf.RpcController controller,
22882           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22883           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) {
22884         channel.callMethod(
22885           getDescriptor().getMethods().get(10),
22886           controller,
22887           request,
22888           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(),
22889           com.google.protobuf.RpcUtil.generalizeCallback(
22890             done,
22891             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class,
22892             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()));
22893       }
22894
      // Method index 11.
22895       public  void replay(
22896           com.google.protobuf.RpcController controller,
22897           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
22898           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) {
22899         channel.callMethod(
22900           getDescriptor().getMethods().get(11),
22901           controller,
22902           request,
22903           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(),
22904           com.google.protobuf.RpcUtil.generalizeCallback(
22905             done,
22906             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class,
22907             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()));
22908       }
22909
      // Method index 12.
22910       public  void rollWALWriter(
22911           com.google.protobuf.RpcController controller,
22912           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
22913           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done) {
22914         channel.callMethod(
22915           getDescriptor().getMethods().get(12),
22916           controller,
22917           request,
22918           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(),
22919           com.google.protobuf.RpcUtil.generalizeCallback(
22920             done,
22921             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class,
22922             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()));
22923       }
22924
      // Method index 13.
22925       public  void getServerInfo(
22926           com.google.protobuf.RpcController controller,
22927           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
22928           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done) {
22929         channel.callMethod(
22930           getDescriptor().getMethods().get(13),
22931           controller,
22932           request,
22933           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(),
22934           com.google.protobuf.RpcUtil.generalizeCallback(
22935             done,
22936             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class,
22937             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()));
22938       }
22939
      // Method index 14.
22940       public  void stopServer(
22941           com.google.protobuf.RpcController controller,
22942           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
22943           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done) {
22944         channel.callMethod(
22945           getDescriptor().getMethods().get(14),
22946           controller,
22947           request,
22948           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(),
22949           com.google.protobuf.RpcUtil.generalizeCallback(
22950             done,
22951             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class,
22952             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()));
22953       }
22954
      // Method index 15.
22955       public  void updateFavoredNodes(
22956           com.google.protobuf.RpcController controller,
22957           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request,
22958           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse> done) {
22959         channel.callMethod(
22960           getDescriptor().getMethods().get(15),
22961           controller,
22962           request,
22963           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance(),
22964           com.google.protobuf.RpcUtil.generalizeCallback(
22965             done,
22966             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.class,
22967             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance()));
22968       }
22969
      // Method index 16.
22970       public  void updateConfiguration(
22971           com.google.protobuf.RpcController controller,
22972           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
22973           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done) {
22974         channel.callMethod(
22975           getDescriptor().getMethods().get(16),
22976           controller,
22977           request,
22978           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance(),
22979           com.google.protobuf.RpcUtil.generalizeCallback(
22980             done,
22981             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class,
22982             org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance()));
22983       }
22984     }
22985 
    // Factory for the synchronous client stub; all calls made through the
    // returned BlockingStub block on the given BlockingRpcChannel.
22986     public static BlockingInterface newBlockingStub(
22987         com.google.protobuf.BlockingRpcChannel channel) {
22988       return new BlockingStub(channel);
22989     }
22990 
    // Synchronous (blocking) view of AdminService: one method per RPC declared
    // in Admin.proto, each taking an RpcController plus the request message,
    // returning the response message, and surfacing transport/remote failures
    // as com.google.protobuf.ServiceException. Generated code — do not edit.
22991     public interface BlockingInterface {
22992       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo(
22993           com.google.protobuf.RpcController controller,
22994           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request)
22995           throws com.google.protobuf.ServiceException;
22996
22997       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile(
22998           com.google.protobuf.RpcController controller,
22999           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request)
23000           throws com.google.protobuf.ServiceException;
23001
23002       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion(
23003           com.google.protobuf.RpcController controller,
23004           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request)
23005           throws com.google.protobuf.ServiceException;
23006
23007       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion(
23008           com.google.protobuf.RpcController controller,
23009           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request)
23010           throws com.google.protobuf.ServiceException;
23011
23012       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse warmupRegion(
23013           com.google.protobuf.RpcController controller,
23014           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest request)
23015           throws com.google.protobuf.ServiceException;
23016
23017       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion(
23018           com.google.protobuf.RpcController controller,
23019           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request)
23020           throws com.google.protobuf.ServiceException;
23021
23022       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion(
23023           com.google.protobuf.RpcController controller,
23024           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request)
23025           throws com.google.protobuf.ServiceException;
23026
23027       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion(
23028           com.google.protobuf.RpcController controller,
23029           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request)
23030           throws com.google.protobuf.ServiceException;
23031
23032       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion(
23033           com.google.protobuf.RpcController controller,
23034           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request)
23035           throws com.google.protobuf.ServiceException;
23036
23037       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse mergeRegions(
23038           com.google.protobuf.RpcController controller,
23039           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request)
23040           throws com.google.protobuf.ServiceException;
23041
      // replicateWALEntry and replay share the ReplicateWALEntryRequest /
      // ReplicateWALEntryResponse message pair.
23042       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry(
23043           com.google.protobuf.RpcController controller,
23044           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request)
23045           throws com.google.protobuf.ServiceException;
23046
23047       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replay(
23048           com.google.protobuf.RpcController controller,
23049           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request)
23050           throws com.google.protobuf.ServiceException;
23051
23052       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter(
23053           com.google.protobuf.RpcController controller,
23054           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request)
23055           throws com.google.protobuf.ServiceException;
23056
23057       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo(
23058           com.google.protobuf.RpcController controller,
23059           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request)
23060           throws com.google.protobuf.ServiceException;
23061
23062       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer(
23063           com.google.protobuf.RpcController controller,
23064           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request)
23065           throws com.google.protobuf.ServiceException;
23066
23067       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse updateFavoredNodes(
23068           com.google.protobuf.RpcController controller,
23069           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request)
23070           throws com.google.protobuf.ServiceException;
23071
23072       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse updateConfiguration(
23073           com.google.protobuf.RpcController controller,
23074           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request)
23075           throws com.google.protobuf.ServiceException;
23076     }
23077 
23078     private static final class BlockingStub implements BlockingInterface {
23079       private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
23080         this.channel = channel;
23081       }
23082 
23083       private final com.google.protobuf.BlockingRpcChannel channel;
23084 
      // Blocking getRegionInfo (method index 0): sends the request over the
      // BlockingRpcChannel and casts the returned Message to the typed response.
23085       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo(
23086           com.google.protobuf.RpcController controller,
23087           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request)
23088           throws com.google.protobuf.ServiceException {
23089         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) channel.callBlockingMethod(
23090           getDescriptor().getMethods().get(0),
23091           controller,
23092           request,
23093           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance());
23094       }
23095 
23096 
      // Blocking getStoreFile (method index 1): sends the request over the
      // BlockingRpcChannel and casts the returned Message to the typed response.
23097       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile(
23098           com.google.protobuf.RpcController controller,
23099           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request)
23100           throws com.google.protobuf.ServiceException {
23101         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) channel.callBlockingMethod(
23102           getDescriptor().getMethods().get(1),
23103           controller,
23104           request,
23105           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance());
23106       }
23107 
23108 
23109       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion(
23110           com.google.protobuf.RpcController controller,
23111           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request)
23112           throws com.google.protobuf.ServiceException {
23113         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod(
23114           getDescriptor().getMethods().get(2),
23115           controller,
23116           request,
23117           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance());
23118       }
23119 
23120 
23121       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion(
23122           com.google.protobuf.RpcController controller,
23123           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request)
23124           throws com.google.protobuf.ServiceException {
23125         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) channel.callBlockingMethod(
23126           getDescriptor().getMethods().get(3),
23127           controller,
23128           request,
23129           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance());
23130       }
23131 
23132 
23133       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse warmupRegion(
23134           com.google.protobuf.RpcController controller,
23135           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest request)
23136           throws com.google.protobuf.ServiceException {
23137         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) channel.callBlockingMethod(
23138           getDescriptor().getMethods().get(4),
23139           controller,
23140           request,
23141           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance());
23142       }
23143 
23144 
23145       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion(
23146           com.google.protobuf.RpcController controller,
23147           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request)
23148           throws com.google.protobuf.ServiceException {
23149         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) channel.callBlockingMethod(
23150           getDescriptor().getMethods().get(5),
23151           controller,
23152           request,
23153           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance());
23154       }
23155 
23156 
23157       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion(
23158           com.google.protobuf.RpcController controller,
23159           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request)
23160           throws com.google.protobuf.ServiceException {
23161         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) channel.callBlockingMethod(
23162           getDescriptor().getMethods().get(6),
23163           controller,
23164           request,
23165           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance());
23166       }
23167 
23168 
23169       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion(
23170           com.google.protobuf.RpcController controller,
23171           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request)
23172           throws com.google.protobuf.ServiceException {
23173         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) channel.callBlockingMethod(
23174           getDescriptor().getMethods().get(7),
23175           controller,
23176           request,
23177           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance());
23178       }
23179 
23180 
23181       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion(
23182           com.google.protobuf.RpcController controller,
23183           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request)
23184           throws com.google.protobuf.ServiceException {
23185         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) channel.callBlockingMethod(
23186           getDescriptor().getMethods().get(8),
23187           controller,
23188           request,
23189           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance());
23190       }
23191 
23192 
23193       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse mergeRegions(
23194           com.google.protobuf.RpcController controller,
23195           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request)
23196           throws com.google.protobuf.ServiceException {
23197         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) channel.callBlockingMethod(
23198           getDescriptor().getMethods().get(9),
23199           controller,
23200           request,
23201           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance());
23202       }
23203 
23204 
23205       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry(
23206           com.google.protobuf.RpcController controller,
23207           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request)
23208           throws com.google.protobuf.ServiceException {
23209         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod(
23210           getDescriptor().getMethods().get(10),
23211           controller,
23212           request,
23213           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance());
23214       }
23215 
23216 
23217       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replay(
23218           com.google.protobuf.RpcController controller,
23219           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request)
23220           throws com.google.protobuf.ServiceException {
23221         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod(
23222           getDescriptor().getMethods().get(11),
23223           controller,
23224           request,
23225           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance());
23226       }
23227 
23228 
23229       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter(
23230           com.google.protobuf.RpcController controller,
23231           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request)
23232           throws com.google.protobuf.ServiceException {
23233         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) channel.callBlockingMethod(
23234           getDescriptor().getMethods().get(12),
23235           controller,
23236           request,
23237           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance());
23238       }
23239 
23240 
23241       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo(
23242           com.google.protobuf.RpcController controller,
23243           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request)
23244           throws com.google.protobuf.ServiceException {
23245         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) channel.callBlockingMethod(
23246           getDescriptor().getMethods().get(13),
23247           controller,
23248           request,
23249           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance());
23250       }
23251 
23252 
23253       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer(
23254           com.google.protobuf.RpcController controller,
23255           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request)
23256           throws com.google.protobuf.ServiceException {
23257         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) channel.callBlockingMethod(
23258           getDescriptor().getMethods().get(14),
23259           controller,
23260           request,
23261           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance());
23262       }
23263 
23264 
23265       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse updateFavoredNodes(
23266           com.google.protobuf.RpcController controller,
23267           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request)
23268           throws com.google.protobuf.ServiceException {
23269         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse) channel.callBlockingMethod(
23270           getDescriptor().getMethods().get(15),
23271           controller,
23272           request,
23273           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance());
23274       }
23275 
23276 
23277       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse updateConfiguration(
23278           com.google.protobuf.RpcController controller,
23279           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request)
23280           throws com.google.protobuf.ServiceException {
23281         return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) channel.callBlockingMethod(
23282           getDescriptor().getMethods().get(16),
23283           controller,
23284           request,
23285           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance());
23286       }
23287 
23288     }
23289 
23290     // @@protoc_insertion_point(class_scope:hbase.pb.AdminService)
23291   }
23292 
// NOTE(review): protoc-generated code — do not hand-edit; regenerate from
// Admin.proto. One Descriptor/FieldAccessorTable pair is declared here for
// every message type in Admin.proto (including nested types such as
// OpenRegionRequest.RegionOpenInfo). All of these statics are left null here
// and are assigned exactly once by the InternalDescriptorAssigner inside this
// file's static initializer, after the serialized file descriptor is parsed.
23293   private static com.google.protobuf.Descriptors.Descriptor
23294     internal_static_hbase_pb_GetRegionInfoRequest_descriptor;
23295   private static
23296     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23297       internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable;
23298   private static com.google.protobuf.Descriptors.Descriptor
23299     internal_static_hbase_pb_GetRegionInfoResponse_descriptor;
23300   private static
23301     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23302       internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable;
23303   private static com.google.protobuf.Descriptors.Descriptor
23304     internal_static_hbase_pb_GetStoreFileRequest_descriptor;
23305   private static
23306     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23307       internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable;
23308   private static com.google.protobuf.Descriptors.Descriptor
23309     internal_static_hbase_pb_GetStoreFileResponse_descriptor;
23310   private static
23311     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23312       internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable;
23313   private static com.google.protobuf.Descriptors.Descriptor
23314     internal_static_hbase_pb_GetOnlineRegionRequest_descriptor;
23315   private static
23316     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23317       internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable;
23318   private static com.google.protobuf.Descriptors.Descriptor
23319     internal_static_hbase_pb_GetOnlineRegionResponse_descriptor;
23320   private static
23321     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23322       internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable;
23323   private static com.google.protobuf.Descriptors.Descriptor
23324     internal_static_hbase_pb_OpenRegionRequest_descriptor;
23325   private static
23326     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23327       internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable;
23328   private static com.google.protobuf.Descriptors.Descriptor
23329     internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor;
23330   private static
23331     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23332       internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable;
23333   private static com.google.protobuf.Descriptors.Descriptor
23334     internal_static_hbase_pb_OpenRegionResponse_descriptor;
23335   private static
23336     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23337       internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable;
23338   private static com.google.protobuf.Descriptors.Descriptor
23339     internal_static_hbase_pb_WarmupRegionRequest_descriptor;
23340   private static
23341     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23342       internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable;
23343   private static com.google.protobuf.Descriptors.Descriptor
23344     internal_static_hbase_pb_WarmupRegionResponse_descriptor;
23345   private static
23346     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23347       internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable;
23348   private static com.google.protobuf.Descriptors.Descriptor
23349     internal_static_hbase_pb_CloseRegionRequest_descriptor;
23350   private static
23351     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23352       internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable;
23353   private static com.google.protobuf.Descriptors.Descriptor
23354     internal_static_hbase_pb_CloseRegionResponse_descriptor;
23355   private static
23356     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23357       internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable;
23358   private static com.google.protobuf.Descriptors.Descriptor
23359     internal_static_hbase_pb_FlushRegionRequest_descriptor;
23360   private static
23361     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23362       internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable;
23363   private static com.google.protobuf.Descriptors.Descriptor
23364     internal_static_hbase_pb_FlushRegionResponse_descriptor;
23365   private static
23366     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23367       internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable;
23368   private static com.google.protobuf.Descriptors.Descriptor
23369     internal_static_hbase_pb_SplitRegionRequest_descriptor;
23370   private static
23371     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23372       internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable;
23373   private static com.google.protobuf.Descriptors.Descriptor
23374     internal_static_hbase_pb_SplitRegionResponse_descriptor;
23375   private static
23376     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23377       internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable;
23378   private static com.google.protobuf.Descriptors.Descriptor
23379     internal_static_hbase_pb_CompactRegionRequest_descriptor;
23380   private static
23381     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23382       internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable;
23383   private static com.google.protobuf.Descriptors.Descriptor
23384     internal_static_hbase_pb_CompactRegionResponse_descriptor;
23385   private static
23386     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23387       internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable;
23388   private static com.google.protobuf.Descriptors.Descriptor
23389     internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor;
23390   private static
23391     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23392       internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable;
23393   private static com.google.protobuf.Descriptors.Descriptor
23394     internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor;
23395   private static
23396     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23397       internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable;
23398   private static com.google.protobuf.Descriptors.Descriptor
23399     internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor;
23400   private static
23401     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23402       internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable;
23403   private static com.google.protobuf.Descriptors.Descriptor
23404     internal_static_hbase_pb_MergeRegionsRequest_descriptor;
23405   private static
23406     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23407       internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable;
23408   private static com.google.protobuf.Descriptors.Descriptor
23409     internal_static_hbase_pb_MergeRegionsResponse_descriptor;
23410   private static
23411     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23412       internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable;
23413   private static com.google.protobuf.Descriptors.Descriptor
23414     internal_static_hbase_pb_WALEntry_descriptor;
23415   private static
23416     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23417       internal_static_hbase_pb_WALEntry_fieldAccessorTable;
23418   private static com.google.protobuf.Descriptors.Descriptor
23419     internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor;
23420   private static
23421     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23422       internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable;
23423   private static com.google.protobuf.Descriptors.Descriptor
23424     internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor;
23425   private static
23426     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23427       internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable;
23428   private static com.google.protobuf.Descriptors.Descriptor
23429     internal_static_hbase_pb_RollWALWriterRequest_descriptor;
23430   private static
23431     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23432       internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable;
23433   private static com.google.protobuf.Descriptors.Descriptor
23434     internal_static_hbase_pb_RollWALWriterResponse_descriptor;
23435   private static
23436     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23437       internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable;
23438   private static com.google.protobuf.Descriptors.Descriptor
23439     internal_static_hbase_pb_StopServerRequest_descriptor;
23440   private static
23441     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23442       internal_static_hbase_pb_StopServerRequest_fieldAccessorTable;
23443   private static com.google.protobuf.Descriptors.Descriptor
23444     internal_static_hbase_pb_StopServerResponse_descriptor;
23445   private static
23446     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23447       internal_static_hbase_pb_StopServerResponse_fieldAccessorTable;
23448   private static com.google.protobuf.Descriptors.Descriptor
23449     internal_static_hbase_pb_GetServerInfoRequest_descriptor;
23450   private static
23451     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23452       internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable;
23453   private static com.google.protobuf.Descriptors.Descriptor
23454     internal_static_hbase_pb_ServerInfo_descriptor;
23455   private static
23456     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23457       internal_static_hbase_pb_ServerInfo_fieldAccessorTable;
23458   private static com.google.protobuf.Descriptors.Descriptor
23459     internal_static_hbase_pb_GetServerInfoResponse_descriptor;
23460   private static
23461     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23462       internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable;
23463   private static com.google.protobuf.Descriptors.Descriptor
23464     internal_static_hbase_pb_UpdateConfigurationRequest_descriptor;
23465   private static
23466     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23467       internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable;
23468   private static com.google.protobuf.Descriptors.Descriptor
23469     internal_static_hbase_pb_UpdateConfigurationResponse_descriptor;
23470   private static
23471     com.google.protobuf.GeneratedMessage.FieldAccessorTable
23472       internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable;
23473 
// Returns the FileDescriptor for Admin.proto. The backing field is assigned
// exactly once by the InternalDescriptorAssigner in the static initializer
// below, so this is non-null after class initialization completes.
// NOTE(review): generated code — regenerate from Admin.proto, do not edit.
23474   public static com.google.protobuf.Descriptors.FileDescriptor
23475       getDescriptor() {
23476     return descriptor;
23477   }
          // Backing field for getDescriptor(); written only from the static block.
23478   private static com.google.protobuf.Descriptors.FileDescriptor
23479       descriptor;
23480   static {
23481     java.lang.String[] descriptorData = {
23482       "\n\013Admin.proto\022\010hbase.pb\032\014Client.proto\032\013H" +
23483       "Base.proto\032\tWAL.proto\"[\n\024GetRegionInfoRe" +
23484       "quest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionS" +
23485       "pecifier\022\030\n\020compaction_state\030\002 \001(\010\"\353\001\n\025G" +
23486       "etRegionInfoResponse\022)\n\013region_info\030\001 \002(" +
23487       "\0132\024.hbase.pb.RegionInfo\022I\n\020compaction_st" +
23488       "ate\030\002 \001(\0162/.hbase.pb.GetRegionInfoRespon" +
23489       "se.CompactionState\022\024\n\014isRecovering\030\003 \001(\010" +
23490       "\"F\n\017CompactionState\022\010\n\004NONE\020\000\022\t\n\005MINOR\020\001" +
23491       "\022\t\n\005MAJOR\020\002\022\023\n\017MAJOR_AND_MINOR\020\003\"P\n\023GetS",
23492       "toreFileRequest\022)\n\006region\030\001 \002(\0132\031.hbase." +
23493       "pb.RegionSpecifier\022\016\n\006family\030\002 \003(\014\"*\n\024Ge" +
23494       "tStoreFileResponse\022\022\n\nstore_file\030\001 \003(\t\"\030" +
23495       "\n\026GetOnlineRegionRequest\"D\n\027GetOnlineReg" +
23496       "ionResponse\022)\n\013region_info\030\001 \003(\0132\024.hbase" +
23497       ".pb.RegionInfo\"\263\002\n\021OpenRegionRequest\022=\n\t" +
23498       "open_info\030\001 \003(\0132*.hbase.pb.OpenRegionReq" +
23499       "uest.RegionOpenInfo\022\027\n\017serverStartCode\030\002" +
23500       " \001(\004\022\032\n\022master_system_time\030\005 \001(\004\032\251\001\n\016Reg" +
23501       "ionOpenInfo\022$\n\006region\030\001 \002(\0132\024.hbase.pb.R",
23502       "egionInfo\022\037\n\027version_of_offline_node\030\002 \001" +
23503       "(\r\022+\n\rfavored_nodes\030\003 \003(\0132\024.hbase.pb.Ser" +
23504       "verName\022#\n\033openForDistributedLogReplay\030\004" +
23505       " \001(\010\"\246\001\n\022OpenRegionResponse\022F\n\ropening_s" +
23506       "tate\030\001 \003(\0162/.hbase.pb.OpenRegionResponse" +
23507       ".RegionOpeningState\"H\n\022RegionOpeningStat" +
23508       "e\022\n\n\006OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAI" +
23509       "LED_OPENING\020\002\"?\n\023WarmupRegionRequest\022(\n\n" +
23510       "regionInfo\030\001 \002(\0132\024.hbase.pb.RegionInfo\"\026" +
23511       "\n\024WarmupRegionResponse\"\313\001\n\022CloseRegionRe",
23512       "quest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionS" +
23513       "pecifier\022\037\n\027version_of_closing_node\030\002 \001(" +
23514       "\r\022\036\n\020transition_in_ZK\030\003 \001(\010:\004true\0220\n\022des" +
23515       "tination_server\030\004 \001(\0132\024.hbase.pb.ServerN" +
23516       "ame\022\027\n\017serverStartCode\030\005 \001(\004\"%\n\023CloseReg" +
23517       "ionResponse\022\016\n\006closed\030\001 \002(\010\"y\n\022FlushRegi" +
23518       "onRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.Reg" +
23519       "ionSpecifier\022\030\n\020if_older_than_ts\030\002 \001(\004\022\036" +
23520       "\n\026write_flush_wal_marker\030\003 \001(\010\"_\n\023FlushR" +
23521       "egionResponse\022\027\n\017last_flush_time\030\001 \002(\004\022\017",
23522       "\n\007flushed\030\002 \001(\010\022\036\n\026wrote_flush_wal_marke" +
23523       "r\030\003 \001(\010\"T\n\022SplitRegionRequest\022)\n\006region\030" +
23524       "\001 \002(\0132\031.hbase.pb.RegionSpecifier\022\023\n\013spli" +
23525       "t_point\030\002 \001(\014\"\025\n\023SplitRegionResponse\"`\n\024" +
23526       "CompactRegionRequest\022)\n\006region\030\001 \002(\0132\031.h" +
23527       "base.pb.RegionSpecifier\022\r\n\005major\030\002 \001(\010\022\016" +
23528       "\n\006family\030\003 \001(\014\"\027\n\025CompactRegionResponse\"" +
23529       "\315\001\n\031UpdateFavoredNodesRequest\022I\n\013update_" +
23530       "info\030\001 \003(\01324.hbase.pb.UpdateFavoredNodes" +
23531       "Request.RegionUpdateInfo\032e\n\020RegionUpdate",
23532       "Info\022$\n\006region\030\001 \002(\0132\024.hbase.pb.RegionIn" +
23533       "fo\022+\n\rfavored_nodes\030\002 \003(\0132\024.hbase.pb.Ser" +
23534       "verName\".\n\032UpdateFavoredNodesResponse\022\020\n" +
23535       "\010response\030\001 \001(\r\"\244\001\n\023MergeRegionsRequest\022" +
23536       "+\n\010region_a\030\001 \002(\0132\031.hbase.pb.RegionSpeci" +
23537       "fier\022+\n\010region_b\030\002 \002(\0132\031.hbase.pb.Region" +
23538       "Specifier\022\027\n\010forcible\030\003 \001(\010:\005false\022\032\n\022ma" +
23539       "ster_system_time\030\004 \001(\004\"\026\n\024MergeRegionsRe" +
23540       "sponse\"a\n\010WALEntry\022\035\n\003key\030\001 \002(\0132\020.hbase." +
23541       "pb.WALKey\022\027\n\017key_value_bytes\030\002 \003(\014\022\035\n\025as",
23542       "sociated_cell_count\030\003 \001(\005\"=\n\030ReplicateWA" +
23543       "LEntryRequest\022!\n\005entry\030\001 \003(\0132\022.hbase.pb." +
23544       "WALEntry\"\033\n\031ReplicateWALEntryResponse\"\026\n" +
23545       "\024RollWALWriterRequest\"0\n\025RollWALWriterRe" +
23546       "sponse\022\027\n\017region_to_flush\030\001 \003(\014\"#\n\021StopS" +
23547       "erverRequest\022\016\n\006reason\030\001 \002(\t\"\024\n\022StopServ" +
23548       "erResponse\"\026\n\024GetServerInfoRequest\"K\n\nSe" +
23549       "rverInfo\022)\n\013server_name\030\001 \002(\0132\024.hbase.pb" +
23550       ".ServerName\022\022\n\nwebui_port\030\002 \001(\r\"B\n\025GetSe" +
23551       "rverInfoResponse\022)\n\013server_info\030\001 \002(\0132\024.",
23552       "hbase.pb.ServerInfo\"\034\n\032UpdateConfigurati" +
23553       "onRequest\"\035\n\033UpdateConfigurationResponse" +
23554       "2\207\013\n\014AdminService\022P\n\rGetRegionInfo\022\036.hba" +
23555       "se.pb.GetRegionInfoRequest\032\037.hbase.pb.Ge" +
23556       "tRegionInfoResponse\022M\n\014GetStoreFile\022\035.hb" +
23557       "ase.pb.GetStoreFileRequest\032\036.hbase.pb.Ge" +
23558       "tStoreFileResponse\022V\n\017GetOnlineRegion\022 ." +
23559       "hbase.pb.GetOnlineRegionRequest\032!.hbase." +
23560       "pb.GetOnlineRegionResponse\022G\n\nOpenRegion" +
23561       "\022\033.hbase.pb.OpenRegionRequest\032\034.hbase.pb",
23562       ".OpenRegionResponse\022M\n\014WarmupRegion\022\035.hb" +
23563       "ase.pb.WarmupRegionRequest\032\036.hbase.pb.Wa" +
23564       "rmupRegionResponse\022J\n\013CloseRegion\022\034.hbas" +
23565       "e.pb.CloseRegionRequest\032\035.hbase.pb.Close" +
23566       "RegionResponse\022J\n\013FlushRegion\022\034.hbase.pb" +
23567       ".FlushRegionRequest\032\035.hbase.pb.FlushRegi" +
23568       "onResponse\022J\n\013SplitRegion\022\034.hbase.pb.Spl" +
23569       "itRegionRequest\032\035.hbase.pb.SplitRegionRe" +
23570       "sponse\022P\n\rCompactRegion\022\036.hbase.pb.Compa" +
23571       "ctRegionRequest\032\037.hbase.pb.CompactRegion",
23572       "Response\022M\n\014MergeRegions\022\035.hbase.pb.Merg" +
23573       "eRegionsRequest\032\036.hbase.pb.MergeRegionsR" +
23574       "esponse\022\\\n\021ReplicateWALEntry\022\".hbase.pb." +
23575       "ReplicateWALEntryRequest\032#.hbase.pb.Repl" +
23576       "icateWALEntryResponse\022Q\n\006Replay\022\".hbase." +
23577       "pb.ReplicateWALEntryRequest\032#.hbase.pb.R" +
23578       "eplicateWALEntryResponse\022P\n\rRollWALWrite" +
23579       "r\022\036.hbase.pb.RollWALWriterRequest\032\037.hbas" +
23580       "e.pb.RollWALWriterResponse\022P\n\rGetServerI" +
23581       "nfo\022\036.hbase.pb.GetServerInfoRequest\032\037.hb",
23582       "ase.pb.GetServerInfoResponse\022G\n\nStopServ" +
23583       "er\022\033.hbase.pb.StopServerRequest\032\034.hbase." +
23584       "pb.StopServerResponse\022_\n\022UpdateFavoredNo" +
23585       "des\022#.hbase.pb.UpdateFavoredNodesRequest" +
23586       "\032$.hbase.pb.UpdateFavoredNodesResponse\022b" +
23587       "\n\023UpdateConfiguration\022$.hbase.pb.UpdateC" +
23588       "onfigurationRequest\032%.hbase.pb.UpdateCon" +
23589       "figurationResponseBA\n*org.apache.hadoop." +
23590       "hbase.protobuf.generatedB\013AdminProtosH\001\210" +
23591       "\001\001\240\001\001"
23592     };
23593     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
23594       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
23595         public com.google.protobuf.ExtensionRegistry assignDescriptors(
23596             com.google.protobuf.Descriptors.FileDescriptor root) {
23597           descriptor = root;
23598           internal_static_hbase_pb_GetRegionInfoRequest_descriptor =
23599             getDescriptor().getMessageTypes().get(0);
23600           internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable = new
23601             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23602               internal_static_hbase_pb_GetRegionInfoRequest_descriptor,
23603               new java.lang.String[] { "Region", "CompactionState", });
23604           internal_static_hbase_pb_GetRegionInfoResponse_descriptor =
23605             getDescriptor().getMessageTypes().get(1);
23606           internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable = new
23607             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23608               internal_static_hbase_pb_GetRegionInfoResponse_descriptor,
23609               new java.lang.String[] { "RegionInfo", "CompactionState", "IsRecovering", });
23610           internal_static_hbase_pb_GetStoreFileRequest_descriptor =
23611             getDescriptor().getMessageTypes().get(2);
23612           internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable = new
23613             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23614               internal_static_hbase_pb_GetStoreFileRequest_descriptor,
23615               new java.lang.String[] { "Region", "Family", });
23616           internal_static_hbase_pb_GetStoreFileResponse_descriptor =
23617             getDescriptor().getMessageTypes().get(3);
23618           internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable = new
23619             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23620               internal_static_hbase_pb_GetStoreFileResponse_descriptor,
23621               new java.lang.String[] { "StoreFile", });
23622           internal_static_hbase_pb_GetOnlineRegionRequest_descriptor =
23623             getDescriptor().getMessageTypes().get(4);
23624           internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable = new
23625             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23626               internal_static_hbase_pb_GetOnlineRegionRequest_descriptor,
23627               new java.lang.String[] { });
23628           internal_static_hbase_pb_GetOnlineRegionResponse_descriptor =
23629             getDescriptor().getMessageTypes().get(5);
23630           internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable = new
23631             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23632               internal_static_hbase_pb_GetOnlineRegionResponse_descriptor,
23633               new java.lang.String[] { "RegionInfo", });
23634           internal_static_hbase_pb_OpenRegionRequest_descriptor =
23635             getDescriptor().getMessageTypes().get(6);
23636           internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable = new
23637             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23638               internal_static_hbase_pb_OpenRegionRequest_descriptor,
23639               new java.lang.String[] { "OpenInfo", "ServerStartCode", "MasterSystemTime", });
23640           internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor =
23641             internal_static_hbase_pb_OpenRegionRequest_descriptor.getNestedTypes().get(0);
23642           internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new
23643             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23644               internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor,
23645               new java.lang.String[] { "Region", "VersionOfOfflineNode", "FavoredNodes", "OpenForDistributedLogReplay", });
23646           internal_static_hbase_pb_OpenRegionResponse_descriptor =
23647             getDescriptor().getMessageTypes().get(7);
23648           internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable = new
23649             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23650               internal_static_hbase_pb_OpenRegionResponse_descriptor,
23651               new java.lang.String[] { "OpeningState", });
23652           internal_static_hbase_pb_WarmupRegionRequest_descriptor =
23653             getDescriptor().getMessageTypes().get(8);
23654           internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable = new
23655             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23656               internal_static_hbase_pb_WarmupRegionRequest_descriptor,
23657               new java.lang.String[] { "RegionInfo", });
23658           internal_static_hbase_pb_WarmupRegionResponse_descriptor =
23659             getDescriptor().getMessageTypes().get(9);
23660           internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable = new
23661             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23662               internal_static_hbase_pb_WarmupRegionResponse_descriptor,
23663               new java.lang.String[] { });
23664           internal_static_hbase_pb_CloseRegionRequest_descriptor =
23665             getDescriptor().getMessageTypes().get(10);
23666           internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable = new
23667             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23668               internal_static_hbase_pb_CloseRegionRequest_descriptor,
23669               new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", "ServerStartCode", });
23670           internal_static_hbase_pb_CloseRegionResponse_descriptor =
23671             getDescriptor().getMessageTypes().get(11);
23672           internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable = new
23673             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23674               internal_static_hbase_pb_CloseRegionResponse_descriptor,
23675               new java.lang.String[] { "Closed", });
23676           internal_static_hbase_pb_FlushRegionRequest_descriptor =
23677             getDescriptor().getMessageTypes().get(12);
23678           internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable = new
23679             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23680               internal_static_hbase_pb_FlushRegionRequest_descriptor,
23681               new java.lang.String[] { "Region", "IfOlderThanTs", "WriteFlushWalMarker", });
23682           internal_static_hbase_pb_FlushRegionResponse_descriptor =
23683             getDescriptor().getMessageTypes().get(13);
23684           internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable = new
23685             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23686               internal_static_hbase_pb_FlushRegionResponse_descriptor,
23687               new java.lang.String[] { "LastFlushTime", "Flushed", "WroteFlushWalMarker", });
23688           internal_static_hbase_pb_SplitRegionRequest_descriptor =
23689             getDescriptor().getMessageTypes().get(14);
23690           internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable = new
23691             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23692               internal_static_hbase_pb_SplitRegionRequest_descriptor,
23693               new java.lang.String[] { "Region", "SplitPoint", });
23694           internal_static_hbase_pb_SplitRegionResponse_descriptor =
23695             getDescriptor().getMessageTypes().get(15);
23696           internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable = new
23697             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23698               internal_static_hbase_pb_SplitRegionResponse_descriptor,
23699               new java.lang.String[] { });
23700           internal_static_hbase_pb_CompactRegionRequest_descriptor =
23701             getDescriptor().getMessageTypes().get(16);
23702           internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable = new
23703             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23704               internal_static_hbase_pb_CompactRegionRequest_descriptor,
23705               new java.lang.String[] { "Region", "Major", "Family", });
23706           internal_static_hbase_pb_CompactRegionResponse_descriptor =
23707             getDescriptor().getMessageTypes().get(17);
23708           internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable = new
23709             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23710               internal_static_hbase_pb_CompactRegionResponse_descriptor,
23711               new java.lang.String[] { });
23712           internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor =
23713             getDescriptor().getMessageTypes().get(18);
23714           internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable = new
23715             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23716               internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor,
23717               new java.lang.String[] { "UpdateInfo", });
23718           internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor =
23719             internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor.getNestedTypes().get(0);
23720           internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable = new
23721             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23722               internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor,
23723               new java.lang.String[] { "Region", "FavoredNodes", });
23724           internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor =
23725             getDescriptor().getMessageTypes().get(19);
23726           internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable = new
23727             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23728               internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor,
23729               new java.lang.String[] { "Response", });
23730           internal_static_hbase_pb_MergeRegionsRequest_descriptor =
23731             getDescriptor().getMessageTypes().get(20);
23732           internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable = new
23733             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23734               internal_static_hbase_pb_MergeRegionsRequest_descriptor,
23735               new java.lang.String[] { "RegionA", "RegionB", "Forcible", "MasterSystemTime", });
23736           internal_static_hbase_pb_MergeRegionsResponse_descriptor =
23737             getDescriptor().getMessageTypes().get(21);
23738           internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable = new
23739             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23740               internal_static_hbase_pb_MergeRegionsResponse_descriptor,
23741               new java.lang.String[] { });
23742           internal_static_hbase_pb_WALEntry_descriptor =
23743             getDescriptor().getMessageTypes().get(22);
23744           internal_static_hbase_pb_WALEntry_fieldAccessorTable = new
23745             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23746               internal_static_hbase_pb_WALEntry_descriptor,
23747               new java.lang.String[] { "Key", "KeyValueBytes", "AssociatedCellCount", });
23748           internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor =
23749             getDescriptor().getMessageTypes().get(23);
23750           internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable = new
23751             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23752               internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor,
23753               new java.lang.String[] { "Entry", });
23754           internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor =
23755             getDescriptor().getMessageTypes().get(24);
23756           internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable = new
23757             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23758               internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor,
23759               new java.lang.String[] { });
23760           internal_static_hbase_pb_RollWALWriterRequest_descriptor =
23761             getDescriptor().getMessageTypes().get(25);
23762           internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable = new
23763             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23764               internal_static_hbase_pb_RollWALWriterRequest_descriptor,
23765               new java.lang.String[] { });
23766           internal_static_hbase_pb_RollWALWriterResponse_descriptor =
23767             getDescriptor().getMessageTypes().get(26);
23768           internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable = new
23769             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23770               internal_static_hbase_pb_RollWALWriterResponse_descriptor,
23771               new java.lang.String[] { "RegionToFlush", });
23772           internal_static_hbase_pb_StopServerRequest_descriptor =
23773             getDescriptor().getMessageTypes().get(27);
23774           internal_static_hbase_pb_StopServerRequest_fieldAccessorTable = new
23775             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23776               internal_static_hbase_pb_StopServerRequest_descriptor,
23777               new java.lang.String[] { "Reason", });
23778           internal_static_hbase_pb_StopServerResponse_descriptor =
23779             getDescriptor().getMessageTypes().get(28);
23780           internal_static_hbase_pb_StopServerResponse_fieldAccessorTable = new
23781             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23782               internal_static_hbase_pb_StopServerResponse_descriptor,
23783               new java.lang.String[] { });
23784           internal_static_hbase_pb_GetServerInfoRequest_descriptor =
23785             getDescriptor().getMessageTypes().get(29);
23786           internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable = new
23787             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23788               internal_static_hbase_pb_GetServerInfoRequest_descriptor,
23789               new java.lang.String[] { });
23790           internal_static_hbase_pb_ServerInfo_descriptor =
23791             getDescriptor().getMessageTypes().get(30);
23792           internal_static_hbase_pb_ServerInfo_fieldAccessorTable = new
23793             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23794               internal_static_hbase_pb_ServerInfo_descriptor,
23795               new java.lang.String[] { "ServerName", "WebuiPort", });
23796           internal_static_hbase_pb_GetServerInfoResponse_descriptor =
23797             getDescriptor().getMessageTypes().get(31);
23798           internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable = new
23799             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23800               internal_static_hbase_pb_GetServerInfoResponse_descriptor,
23801               new java.lang.String[] { "ServerInfo", });
23802           internal_static_hbase_pb_UpdateConfigurationRequest_descriptor =
23803             getDescriptor().getMessageTypes().get(32);
23804           internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable = new
23805             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23806               internal_static_hbase_pb_UpdateConfigurationRequest_descriptor,
23807               new java.lang.String[] { });
23808           internal_static_hbase_pb_UpdateConfigurationResponse_descriptor =
23809             getDescriptor().getMessageTypes().get(33);
23810           internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable = new
23811             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
23812               internal_static_hbase_pb_UpdateConfigurationResponse_descriptor,
23813               new java.lang.String[] { });
23814           return null;
23815         }
23816       };
    // Parse the serialized FileDescriptorProto (descriptorData above) into the
    // runtime FileDescriptor, resolving cross-file type references against the
    // dependencies Admin.proto imports (Client.proto, HBase.proto, WAL.proto);
    // the assigner defined just above is handed the built descriptor so it can
    // populate the cached descriptor/accessor-table fields.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor(),
        }, assigner);
23824   }
23825 
23826   // @@protoc_insertion_point(outer_class_scope)
23827 }