
1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: Client.proto
3   
4   package org.apache.hadoop.hbase.protobuf.generated;
5   
6   public final class ClientProtos {
7     private ClientProtos() {}
8     public static void registerAllExtensions(
9         com.google.protobuf.ExtensionRegistry registry) {
10    }
11    /**
12     * Protobuf enum {@code hbase.pb.Consistency}
13     *
14     * <pre>
15     **
16     * Consistency defines the expected consistency level for an operation.
17     * </pre>
18     */
19    public enum Consistency
20        implements com.google.protobuf.ProtocolMessageEnum {
21      /**
22       * <code>STRONG = 0;</code>
23       */
24      STRONG(0, 0),
25      /**
26       * <code>TIMELINE = 1;</code>
27       */
28      TIMELINE(1, 1),
29      ;
30  
31      /**
32       * <code>STRONG = 0;</code>
33       */
34      public static final int STRONG_VALUE = 0;
35      /**
36       * <code>TIMELINE = 1;</code>
37       */
38      public static final int TIMELINE_VALUE = 1;
39  
40  
41      public final int getNumber() { return value; }
42  
43      public static Consistency valueOf(int value) {
44        switch (value) {
45          case 0: return STRONG;
46          case 1: return TIMELINE;
47          default: return null;
48        }
49      }
50  
51      public static com.google.protobuf.Internal.EnumLiteMap<Consistency>
52          internalGetValueMap() {
53        return internalValueMap;
54      }
55      private static com.google.protobuf.Internal.EnumLiteMap<Consistency>
56          internalValueMap =
57            new com.google.protobuf.Internal.EnumLiteMap<Consistency>() {
58              public Consistency findValueByNumber(int number) {
59                return Consistency.valueOf(number);
60              }
61            };
62  
63      public final com.google.protobuf.Descriptors.EnumValueDescriptor
64          getValueDescriptor() {
65        return getDescriptor().getValues().get(index);
66      }
67      public final com.google.protobuf.Descriptors.EnumDescriptor
68          getDescriptorForType() {
69        return getDescriptor();
70      }
71      public static final com.google.protobuf.Descriptors.EnumDescriptor
72          getDescriptor() {
73        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getEnumTypes().get(0);
74      }
75  
76      private static final Consistency[] VALUES = values();
77  
78      public static Consistency valueOf(
79          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
80        if (desc.getType() != getDescriptor()) {
81          throw new java.lang.IllegalArgumentException(
82            "EnumValueDescriptor is not for this type.");
83        }
84        return VALUES[desc.getIndex()];
85      }
86  
87      private final int index;
88      private final int value;
89  
90      private Consistency(int index, int value) {
91        this.index = index;
92        this.value = value;
93      }
94  
95      // @@protoc_insertion_point(enum_scope:hbase.pb.Consistency)
96    }
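    // A minimal usage sketch, not part of the generated source: mapping between
    // the Consistency wire values above and the enum constants. The literal
    // values used here are only illustrative.
    //
    //   ClientProtos.Consistency c = ClientProtos.Consistency.valueOf(1);   // TIMELINE
    //   int wire = ClientProtos.Consistency.STRONG.getNumber();             // 0
    //   // valueOf(int) returns null for wire values with no matching constant.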
97  
98    public interface AuthorizationsOrBuilder
99        extends com.google.protobuf.MessageOrBuilder {
100 
101     // repeated string label = 1;
102     /**
103      * <code>repeated string label = 1;</code>
104      */
105     java.util.List<java.lang.String>
106     getLabelList();
107     /**
108      * <code>repeated string label = 1;</code>
109      */
110     int getLabelCount();
111     /**
112      * <code>repeated string label = 1;</code>
113      */
114     java.lang.String getLabel(int index);
115     /**
116      * <code>repeated string label = 1;</code>
117      */
118     com.google.protobuf.ByteString
119         getLabelBytes(int index);
120   }
121   /**
122    * Protobuf type {@code hbase.pb.Authorizations}
123    *
124    * <pre>
125    **
126    * The protocol buffer version of Authorizations.
127    * </pre>
128    */
129   public static final class Authorizations extends
130       com.google.protobuf.GeneratedMessage
131       implements AuthorizationsOrBuilder {
132     // Use Authorizations.newBuilder() to construct.
133     private Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
134       super(builder);
135       this.unknownFields = builder.getUnknownFields();
136     }
137     private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
138 
139     private static final Authorizations defaultInstance;
140     public static Authorizations getDefaultInstance() {
141       return defaultInstance;
142     }
143 
144     public Authorizations getDefaultInstanceForType() {
145       return defaultInstance;
146     }
147 
148     private final com.google.protobuf.UnknownFieldSet unknownFields;
149     @java.lang.Override
150     public final com.google.protobuf.UnknownFieldSet
151         getUnknownFields() {
152       return this.unknownFields;
153     }
154     private Authorizations(
155         com.google.protobuf.CodedInputStream input,
156         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
157         throws com.google.protobuf.InvalidProtocolBufferException {
158       initFields();
159       int mutable_bitField0_ = 0;
160       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
161           com.google.protobuf.UnknownFieldSet.newBuilder();
162       try {
163         boolean done = false;
164         while (!done) {
165           int tag = input.readTag();
166           switch (tag) {
167             case 0:
168               done = true;
169               break;
170             default: {
171               if (!parseUnknownField(input, unknownFields,
172                                      extensionRegistry, tag)) {
173                 done = true;
174               }
175               break;
176             }
177             case 10: {
178               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
179                 label_ = new com.google.protobuf.LazyStringArrayList();
180                 mutable_bitField0_ |= 0x00000001;
181               }
182               label_.add(input.readBytes());
183               break;
184             }
185           }
186         }
187       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
188         throw e.setUnfinishedMessage(this);
189       } catch (java.io.IOException e) {
190         throw new com.google.protobuf.InvalidProtocolBufferException(
191             e.getMessage()).setUnfinishedMessage(this);
192       } finally {
193         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
194           label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_);
195         }
196         this.unknownFields = unknownFields.build();
197         makeExtensionsImmutable();
198       }
199     }
200     public static final com.google.protobuf.Descriptors.Descriptor
201         getDescriptor() {
202       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor;
203     }
204 
205     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
206         internalGetFieldAccessorTable() {
207       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable
208           .ensureFieldAccessorsInitialized(
209               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
210     }
211 
212     public static com.google.protobuf.Parser<Authorizations> PARSER =
213         new com.google.protobuf.AbstractParser<Authorizations>() {
214       public Authorizations parsePartialFrom(
215           com.google.protobuf.CodedInputStream input,
216           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
217           throws com.google.protobuf.InvalidProtocolBufferException {
218         return new Authorizations(input, extensionRegistry);
219       }
220     };
221 
222     @java.lang.Override
223     public com.google.protobuf.Parser<Authorizations> getParserForType() {
224       return PARSER;
225     }
226 
227     // repeated string label = 1;
228     public static final int LABEL_FIELD_NUMBER = 1;
229     private com.google.protobuf.LazyStringList label_;
230     /**
231      * <code>repeated string label = 1;</code>
232      */
233     public java.util.List<java.lang.String>
234         getLabelList() {
235       return label_;
236     }
237     /**
238      * <code>repeated string label = 1;</code>
239      */
240     public int getLabelCount() {
241       return label_.size();
242     }
243     /**
244      * <code>repeated string label = 1;</code>
245      */
246     public java.lang.String getLabel(int index) {
247       return label_.get(index);
248     }
249     /**
250      * <code>repeated string label = 1;</code>
251      */
252     public com.google.protobuf.ByteString
253         getLabelBytes(int index) {
254       return label_.getByteString(index);
255     }
256 
257     private void initFields() {
258       label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
259     }
260     private byte memoizedIsInitialized = -1;
261     public final boolean isInitialized() {
262       byte isInitialized = memoizedIsInitialized;
263       if (isInitialized != -1) return isInitialized == 1;
264 
265       memoizedIsInitialized = 1;
266       return true;
267     }
268 
269     public void writeTo(com.google.protobuf.CodedOutputStream output)
270                         throws java.io.IOException {
271       getSerializedSize();
272       for (int i = 0; i < label_.size(); i++) {
273         output.writeBytes(1, label_.getByteString(i));
274       }
275       getUnknownFields().writeTo(output);
276     }
277 
278     private int memoizedSerializedSize = -1;
279     public int getSerializedSize() {
280       int size = memoizedSerializedSize;
281       if (size != -1) return size;
282 
283       size = 0;
284       {
285         int dataSize = 0;
286         for (int i = 0; i < label_.size(); i++) {
287           dataSize += com.google.protobuf.CodedOutputStream
288             .computeBytesSizeNoTag(label_.getByteString(i));
289         }
290         size += dataSize;
291         size += 1 * getLabelList().size();
292       }
293       size += getUnknownFields().getSerializedSize();
294       memoizedSerializedSize = size;
295       return size;
296     }
297 
298     private static final long serialVersionUID = 0L;
299     @java.lang.Override
300     protected java.lang.Object writeReplace()
301         throws java.io.ObjectStreamException {
302       return super.writeReplace();
303     }
304 
305     @java.lang.Override
306     public boolean equals(final java.lang.Object obj) {
307       if (obj == this) {
308        return true;
309       }
310       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)) {
311         return super.equals(obj);
312       }
313       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) obj;
314 
315       boolean result = true;
316       result = result && getLabelList()
317           .equals(other.getLabelList());
318       result = result &&
319           getUnknownFields().equals(other.getUnknownFields());
320       return result;
321     }
322 
323     private int memoizedHashCode = 0;
324     @java.lang.Override
325     public int hashCode() {
326       if (memoizedHashCode != 0) {
327         return memoizedHashCode;
328       }
329       int hash = 41;
330       hash = (19 * hash) + getDescriptorForType().hashCode();
331       if (getLabelCount() > 0) {
332         hash = (37 * hash) + LABEL_FIELD_NUMBER;
333         hash = (53 * hash) + getLabelList().hashCode();
334       }
335       hash = (29 * hash) + getUnknownFields().hashCode();
336       memoizedHashCode = hash;
337       return hash;
338     }
339 
340     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
341         com.google.protobuf.ByteString data)
342         throws com.google.protobuf.InvalidProtocolBufferException {
343       return PARSER.parseFrom(data);
344     }
345     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
346         com.google.protobuf.ByteString data,
347         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
348         throws com.google.protobuf.InvalidProtocolBufferException {
349       return PARSER.parseFrom(data, extensionRegistry);
350     }
351     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data)
352         throws com.google.protobuf.InvalidProtocolBufferException {
353       return PARSER.parseFrom(data);
354     }
355     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
356         byte[] data,
357         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
358         throws com.google.protobuf.InvalidProtocolBufferException {
359       return PARSER.parseFrom(data, extensionRegistry);
360     }
361     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input)
362         throws java.io.IOException {
363       return PARSER.parseFrom(input);
364     }
365     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
366         java.io.InputStream input,
367         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
368         throws java.io.IOException {
369       return PARSER.parseFrom(input, extensionRegistry);
370     }
371     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input)
372         throws java.io.IOException {
373       return PARSER.parseDelimitedFrom(input);
374     }
375     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(
376         java.io.InputStream input,
377         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
378         throws java.io.IOException {
379       return PARSER.parseDelimitedFrom(input, extensionRegistry);
380     }
381     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
382         com.google.protobuf.CodedInputStream input)
383         throws java.io.IOException {
384       return PARSER.parseFrom(input);
385     }
386     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
387         com.google.protobuf.CodedInputStream input,
388         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
389         throws java.io.IOException {
390       return PARSER.parseFrom(input, extensionRegistry);
391     }
392 
393     public static Builder newBuilder() { return Builder.create(); }
394     public Builder newBuilderForType() { return newBuilder(); }
395     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype) {
396       return newBuilder().mergeFrom(prototype);
397     }
398     public Builder toBuilder() { return newBuilder(this); }
399 
400     @java.lang.Override
401     protected Builder newBuilderForType(
402         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
403       Builder builder = new Builder(parent);
404       return builder;
405     }
406     /**
407      * Protobuf type {@code hbase.pb.Authorizations}
408      *
409      * <pre>
410      **
411      * The protocol buffer version of Authorizations.
412      * </pre>
413      */
414     public static final class Builder extends
415         com.google.protobuf.GeneratedMessage.Builder<Builder>
416        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.AuthorizationsOrBuilder {
417       public static final com.google.protobuf.Descriptors.Descriptor
418           getDescriptor() {
419         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor;
420       }
421 
422       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
423           internalGetFieldAccessorTable() {
424         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable
425             .ensureFieldAccessorsInitialized(
426                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
427       }
428 
429       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.newBuilder()
430       private Builder() {
431         maybeForceBuilderInitialization();
432       }
433 
434       private Builder(
435           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
436         super(parent);
437         maybeForceBuilderInitialization();
438       }
439       private void maybeForceBuilderInitialization() {
440         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
441         }
442       }
443       private static Builder create() {
444         return new Builder();
445       }
446 
447       public Builder clear() {
448         super.clear();
449         label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
450         bitField0_ = (bitField0_ & ~0x00000001);
451         return this;
452       }
453 
454       public Builder clone() {
455         return create().mergeFrom(buildPartial());
456       }
457 
458       public com.google.protobuf.Descriptors.Descriptor
459           getDescriptorForType() {
460         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor;
461       }
462 
463       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() {
464         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance();
465       }
466 
467       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations build() {
468         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = buildPartial();
469         if (!result.isInitialized()) {
470           throw newUninitializedMessageException(result);
471         }
472         return result;
473       }
474 
475       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations buildPartial() {
476         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations(this);
477         int from_bitField0_ = bitField0_;
478         if (((bitField0_ & 0x00000001) == 0x00000001)) {
479           label_ = new com.google.protobuf.UnmodifiableLazyStringList(
480               label_);
481           bitField0_ = (bitField0_ & ~0x00000001);
482         }
483         result.label_ = label_;
484         onBuilt();
485         return result;
486       }
487 
488       public Builder mergeFrom(com.google.protobuf.Message other) {
489         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) {
490           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)other);
491         } else {
492           super.mergeFrom(other);
493           return this;
494         }
495       }
496 
497       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other) {
498         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this;
499         if (!other.label_.isEmpty()) {
500           if (label_.isEmpty()) {
501             label_ = other.label_;
502             bitField0_ = (bitField0_ & ~0x00000001);
503           } else {
504             ensureLabelIsMutable();
505             label_.addAll(other.label_);
506           }
507           onChanged();
508         }
509         this.mergeUnknownFields(other.getUnknownFields());
510         return this;
511       }
512 
513       public final boolean isInitialized() {
514         return true;
515       }
516 
517       public Builder mergeFrom(
518           com.google.protobuf.CodedInputStream input,
519           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
520           throws java.io.IOException {
521         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parsedMessage = null;
522         try {
523           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
524         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
525           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage();
526           throw e;
527         } finally {
528           if (parsedMessage != null) {
529             mergeFrom(parsedMessage);
530           }
531         }
532         return this;
533       }
534       private int bitField0_;
535 
536       // repeated string label = 1;
537       private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
538       private void ensureLabelIsMutable() {
539         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
540           label_ = new com.google.protobuf.LazyStringArrayList(label_);
541           bitField0_ |= 0x00000001;
542          }
543       }
544       /**
545        * <code>repeated string label = 1;</code>
546        */
547       public java.util.List<java.lang.String>
548           getLabelList() {
549         return java.util.Collections.unmodifiableList(label_);
550       }
551       /**
552        * <code>repeated string label = 1;</code>
553        */
554       public int getLabelCount() {
555         return label_.size();
556       }
557       /**
558        * <code>repeated string label = 1;</code>
559        */
560       public java.lang.String getLabel(int index) {
561         return label_.get(index);
562       }
563       /**
564        * <code>repeated string label = 1;</code>
565        */
566       public com.google.protobuf.ByteString
567           getLabelBytes(int index) {
568         return label_.getByteString(index);
569       }
570       /**
571        * <code>repeated string label = 1;</code>
572        */
573       public Builder setLabel(
574           int index, java.lang.String value) {
575         if (value == null) {
576           throw new NullPointerException();
577         }
578         ensureLabelIsMutable();
579         label_.set(index, value);
580         onChanged();
581         return this;
582       }
583       /**
584        * <code>repeated string label = 1;</code>
585        */
586       public Builder addLabel(
587           java.lang.String value) {
588         if (value == null) {
589           throw new NullPointerException();
590         }
591         ensureLabelIsMutable();
592         label_.add(value);
593         onChanged();
594         return this;
595       }
596       /**
597        * <code>repeated string label = 1;</code>
598        */
599       public Builder addAllLabel(
600           java.lang.Iterable<java.lang.String> values) {
601         ensureLabelIsMutable();
602         super.addAll(values, label_);
603         onChanged();
604         return this;
605       }
606       /**
607        * <code>repeated string label = 1;</code>
608        */
609       public Builder clearLabel() {
610         label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
611         bitField0_ = (bitField0_ & ~0x00000001);
612         onChanged();
613         return this;
614       }
615       /**
616        * <code>repeated string label = 1;</code>
617        */
618       public Builder addLabelBytes(
619           com.google.protobuf.ByteString value) {
620         if (value == null) {
621           throw new NullPointerException();
622         }
623         ensureLabelIsMutable();
624         label_.add(value);
625         onChanged();
626         return this;
627       }
628 
629       // @@protoc_insertion_point(builder_scope:hbase.pb.Authorizations)
630     }
631 
632     static {
633       defaultInstance = new Authorizations(true);
634       defaultInstance.initFields();
635     }
636 
637     // @@protoc_insertion_point(class_scope:hbase.pb.Authorizations)
638   }
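  // A minimal usage sketch, not part of the generated source: building an
  // Authorizations message and round-tripping it through its serialized form.
  // The label strings are only illustrative.
  //
  //   ClientProtos.Authorizations auths = ClientProtos.Authorizations.newBuilder()
  //       .addLabel("secret")
  //       .addLabel("topsecret")
  //       .build();
  //   byte[] bytes = auths.toByteArray();
  //   ClientProtos.Authorizations copy = ClientProtos.Authorizations.parseFrom(bytes);
  //   // copy.getLabelList() contains ["secret", "topsecret"].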
639 
640   public interface CellVisibilityOrBuilder
641       extends com.google.protobuf.MessageOrBuilder {
642 
643     // required string expression = 1;
644     /**
645      * <code>required string expression = 1;</code>
646      */
647     boolean hasExpression();
648     /**
649      * <code>required string expression = 1;</code>
650      */
651     java.lang.String getExpression();
652     /**
653      * <code>required string expression = 1;</code>
654      */
655     com.google.protobuf.ByteString
656         getExpressionBytes();
657   }
658   /**
659    * Protobuf type {@code hbase.pb.CellVisibility}
660    *
661    * <pre>
662    **
663    * The protocol buffer version of CellVisibility.
664    * </pre>
665    */
666   public static final class CellVisibility extends
667       com.google.protobuf.GeneratedMessage
668       implements CellVisibilityOrBuilder {
669     // Use CellVisibility.newBuilder() to construct.
670     private CellVisibility(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
671       super(builder);
672       this.unknownFields = builder.getUnknownFields();
673     }
674     private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
675 
676     private static final CellVisibility defaultInstance;
677     public static CellVisibility getDefaultInstance() {
678       return defaultInstance;
679     }
680 
681     public CellVisibility getDefaultInstanceForType() {
682       return defaultInstance;
683     }
684 
685     private final com.google.protobuf.UnknownFieldSet unknownFields;
686     @java.lang.Override
687     public final com.google.protobuf.UnknownFieldSet
688         getUnknownFields() {
689       return this.unknownFields;
690     }
691     private CellVisibility(
692         com.google.protobuf.CodedInputStream input,
693         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
694         throws com.google.protobuf.InvalidProtocolBufferException {
695       initFields();
696       int mutable_bitField0_ = 0;
697       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
698           com.google.protobuf.UnknownFieldSet.newBuilder();
699       try {
700         boolean done = false;
701         while (!done) {
702           int tag = input.readTag();
703           switch (tag) {
704             case 0:
705               done = true;
706               break;
707             default: {
708               if (!parseUnknownField(input, unknownFields,
709                                      extensionRegistry, tag)) {
710                 done = true;
711               }
712               break;
713             }
714             case 10: {
715               bitField0_ |= 0x00000001;
716               expression_ = input.readBytes();
717               break;
718             }
719           }
720         }
721       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
722         throw e.setUnfinishedMessage(this);
723       } catch (java.io.IOException e) {
724         throw new com.google.protobuf.InvalidProtocolBufferException(
725             e.getMessage()).setUnfinishedMessage(this);
726       } finally {
727         this.unknownFields = unknownFields.build();
728         makeExtensionsImmutable();
729       }
730     }
731     public static final com.google.protobuf.Descriptors.Descriptor
732         getDescriptor() {
733       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor;
734     }
735 
736     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
737         internalGetFieldAccessorTable() {
738       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable
739           .ensureFieldAccessorsInitialized(
740               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
741     }
742 
743     public static com.google.protobuf.Parser<CellVisibility> PARSER =
744         new com.google.protobuf.AbstractParser<CellVisibility>() {
745       public CellVisibility parsePartialFrom(
746           com.google.protobuf.CodedInputStream input,
747           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
748           throws com.google.protobuf.InvalidProtocolBufferException {
749         return new CellVisibility(input, extensionRegistry);
750       }
751     };
752 
753     @java.lang.Override
754     public com.google.protobuf.Parser<CellVisibility> getParserForType() {
755       return PARSER;
756     }
757 
758     private int bitField0_;
759     // required string expression = 1;
760     public static final int EXPRESSION_FIELD_NUMBER = 1;
761     private java.lang.Object expression_;
762     /**
763      * <code>required string expression = 1;</code>
764      */
765     public boolean hasExpression() {
766       return ((bitField0_ & 0x00000001) == 0x00000001);
767     }
768     /**
769      * <code>required string expression = 1;</code>
770      */
771     public java.lang.String getExpression() {
772       java.lang.Object ref = expression_;
773       if (ref instanceof java.lang.String) {
774         return (java.lang.String) ref;
775       } else {
776         com.google.protobuf.ByteString bs = 
777             (com.google.protobuf.ByteString) ref;
778         java.lang.String s = bs.toStringUtf8();
779         if (bs.isValidUtf8()) {
780           expression_ = s;
781         }
782         return s;
783       }
784     }
785     /**
786      * <code>required string expression = 1;</code>
787      */
788     public com.google.protobuf.ByteString
789         getExpressionBytes() {
790       java.lang.Object ref = expression_;
791       if (ref instanceof java.lang.String) {
792         com.google.protobuf.ByteString b = 
793             com.google.protobuf.ByteString.copyFromUtf8(
794                 (java.lang.String) ref);
795         expression_ = b;
796         return b;
797       } else {
798         return (com.google.protobuf.ByteString) ref;
799       }
800     }
801 
802     private void initFields() {
803       expression_ = "";
804     }
805     private byte memoizedIsInitialized = -1;
806     public final boolean isInitialized() {
807       byte isInitialized = memoizedIsInitialized;
808       if (isInitialized != -1) return isInitialized == 1;
809 
810       if (!hasExpression()) {
811         memoizedIsInitialized = 0;
812         return false;
813       }
814       memoizedIsInitialized = 1;
815       return true;
816     }
817 
818     public void writeTo(com.google.protobuf.CodedOutputStream output)
819                         throws java.io.IOException {
820       getSerializedSize();
821       if (((bitField0_ & 0x00000001) == 0x00000001)) {
822         output.writeBytes(1, getExpressionBytes());
823       }
824       getUnknownFields().writeTo(output);
825     }
826 
827     private int memoizedSerializedSize = -1;
828     public int getSerializedSize() {
829       int size = memoizedSerializedSize;
830       if (size != -1) return size;
831 
832       size = 0;
833       if (((bitField0_ & 0x00000001) == 0x00000001)) {
834         size += com.google.protobuf.CodedOutputStream
835           .computeBytesSize(1, getExpressionBytes());
836       }
837       size += getUnknownFields().getSerializedSize();
838       memoizedSerializedSize = size;
839       return size;
840     }
841 
842     private static final long serialVersionUID = 0L;
843     @java.lang.Override
844     protected java.lang.Object writeReplace()
845         throws java.io.ObjectStreamException {
846       return super.writeReplace();
847     }
848 
849     @java.lang.Override
850     public boolean equals(final java.lang.Object obj) {
851       if (obj == this) {
852        return true;
853       }
854       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)) {
855         return super.equals(obj);
856       }
857       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) obj;
858 
859       boolean result = true;
860       result = result && (hasExpression() == other.hasExpression());
861       if (hasExpression()) {
862         result = result && getExpression()
863             .equals(other.getExpression());
864       }
865       result = result &&
866           getUnknownFields().equals(other.getUnknownFields());
867       return result;
868     }
869 
870     private int memoizedHashCode = 0;
871     @java.lang.Override
872     public int hashCode() {
873       if (memoizedHashCode != 0) {
874         return memoizedHashCode;
875       }
876       int hash = 41;
877       hash = (19 * hash) + getDescriptorForType().hashCode();
878       if (hasExpression()) {
879         hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
880         hash = (53 * hash) + getExpression().hashCode();
881       }
882       hash = (29 * hash) + getUnknownFields().hashCode();
883       memoizedHashCode = hash;
884       return hash;
885     }
886 
887     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
888         com.google.protobuf.ByteString data)
889         throws com.google.protobuf.InvalidProtocolBufferException {
890       return PARSER.parseFrom(data);
891     }
892     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
893         com.google.protobuf.ByteString data,
894         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
895         throws com.google.protobuf.InvalidProtocolBufferException {
896       return PARSER.parseFrom(data, extensionRegistry);
897     }
898     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data)
899         throws com.google.protobuf.InvalidProtocolBufferException {
900       return PARSER.parseFrom(data);
901     }
902     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
903         byte[] data,
904         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
905         throws com.google.protobuf.InvalidProtocolBufferException {
906       return PARSER.parseFrom(data, extensionRegistry);
907     }
908     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input)
909         throws java.io.IOException {
910       return PARSER.parseFrom(input);
911     }
912     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
913         java.io.InputStream input,
914         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
915         throws java.io.IOException {
916       return PARSER.parseFrom(input, extensionRegistry);
917     }
918     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input)
919         throws java.io.IOException {
920       return PARSER.parseDelimitedFrom(input);
921     }
922     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(
923         java.io.InputStream input,
924         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
925         throws java.io.IOException {
926       return PARSER.parseDelimitedFrom(input, extensionRegistry);
927     }
928     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
929         com.google.protobuf.CodedInputStream input)
930         throws java.io.IOException {
931       return PARSER.parseFrom(input);
932     }
933     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
934         com.google.protobuf.CodedInputStream input,
935         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
936         throws java.io.IOException {
937       return PARSER.parseFrom(input, extensionRegistry);
938     }
939 
940     public static Builder newBuilder() { return Builder.create(); }
941     public Builder newBuilderForType() { return newBuilder(); }
942     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype) {
943       return newBuilder().mergeFrom(prototype);
944     }
945     public Builder toBuilder() { return newBuilder(this); }
946 
947     @java.lang.Override
948     protected Builder newBuilderForType(
949         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
950       Builder builder = new Builder(parent);
951       return builder;
952     }
953     /**
954      * Protobuf type {@code hbase.pb.CellVisibility}
955      *
956      * <pre>
957      **
958      * The protocol buffer version of CellVisibility.
959      * </pre>
960      */
961     public static final class Builder extends
962         com.google.protobuf.GeneratedMessage.Builder<Builder>
963        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibilityOrBuilder {
964       public static final com.google.protobuf.Descriptors.Descriptor
965           getDescriptor() {
966         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor;
967       }
968 
969       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
970           internalGetFieldAccessorTable() {
971         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable
972             .ensureFieldAccessorsInitialized(
973                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
974       }
975 
976       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.newBuilder()
977       private Builder() {
978         maybeForceBuilderInitialization();
979       }
980 
981       private Builder(
982           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
983         super(parent);
984         maybeForceBuilderInitialization();
985       }
986       private void maybeForceBuilderInitialization() {
987         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
988         }
989       }
990       private static Builder create() {
991         return new Builder();
992       }
993 
994       public Builder clear() {
995         super.clear();
996         expression_ = "";
997         bitField0_ = (bitField0_ & ~0x00000001);
998         return this;
999       }
1000 
1001       public Builder clone() {
1002         return create().mergeFrom(buildPartial());
1003       }
1004 
1005       public com.google.protobuf.Descriptors.Descriptor
1006           getDescriptorForType() {
1007         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor;
1008       }
1009 
1010       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() {
1011         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance();
1012       }
1013 
1014       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility build() {
1015         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = buildPartial();
1016         if (!result.isInitialized()) {
1017           throw newUninitializedMessageException(result);
1018         }
1019         return result;
1020       }
1021 
1022       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility buildPartial() {
1023         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility(this);
1024         int from_bitField0_ = bitField0_;
1025         int to_bitField0_ = 0;
1026         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1027           to_bitField0_ |= 0x00000001;
1028         }
1029         result.expression_ = expression_;
1030         result.bitField0_ = to_bitField0_;
1031         onBuilt();
1032         return result;
1033       }
1034 
1035       public Builder mergeFrom(com.google.protobuf.Message other) {
1036         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) {
1037           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)other);
1038         } else {
1039           super.mergeFrom(other);
1040           return this;
1041         }
1042       }
1043 
1044       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other) {
1045         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this;
1046         if (other.hasExpression()) {
1047           bitField0_ |= 0x00000001;
1048           expression_ = other.expression_;
1049           onChanged();
1050         }
1051         this.mergeUnknownFields(other.getUnknownFields());
1052         return this;
1053       }
1054 
1055       public final boolean isInitialized() {
1056         if (!hasExpression()) {
1057           
1058           return false;
1059         }
1060         return true;
1061       }
1062 
1063       public Builder mergeFrom(
1064           com.google.protobuf.CodedInputStream input,
1065           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1066           throws java.io.IOException {
1067         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null;
1068         try {
1069           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1070         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1071           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage();
1072           throw e;
1073         } finally {
1074           if (parsedMessage != null) {
1075             mergeFrom(parsedMessage);
1076           }
1077         }
1078         return this;
1079       }
1080       private int bitField0_;
1081 
1082       // required string expression = 1;
1083       private java.lang.Object expression_ = "";
1084       /**
1085        * <code>required string expression = 1;</code>
1086        */
1087       public boolean hasExpression() {
1088         return ((bitField0_ & 0x00000001) == 0x00000001);
1089       }
1090       /**
1091        * <code>required string expression = 1;</code>
1092        */
1093       public java.lang.String getExpression() {
1094         java.lang.Object ref = expression_;
1095         if (!(ref instanceof java.lang.String)) {
1096           java.lang.String s = ((com.google.protobuf.ByteString) ref)
1097               .toStringUtf8();
1098           expression_ = s;
1099           return s;
1100         } else {
1101           return (java.lang.String) ref;
1102         }
1103       }
1104       /**
1105        * <code>required string expression = 1;</code>
1106        */
1107       public com.google.protobuf.ByteString
1108           getExpressionBytes() {
1109         java.lang.Object ref = expression_;
1110         if (ref instanceof String) {
1111           com.google.protobuf.ByteString b = 
1112               com.google.protobuf.ByteString.copyFromUtf8(
1113                   (java.lang.String) ref);
1114           expression_ = b;
1115           return b;
1116         } else {
1117           return (com.google.protobuf.ByteString) ref;
1118         }
1119       }
1120       /**
1121        * <code>required string expression = 1;</code>
1122        */
1123       public Builder setExpression(
1124           java.lang.String value) {
1125         if (value == null) {
1126           throw new NullPointerException();
1127         }
1128         bitField0_ |= 0x00000001;
1129         expression_ = value;
1130         onChanged();
1131         return this;
1132       }
1133       /**
1134        * <code>required string expression = 1;</code>
1135        */
1136       public Builder clearExpression() {
1137         bitField0_ = (bitField0_ & ~0x00000001);
1138         expression_ = getDefaultInstance().getExpression();
1139         onChanged();
1140         return this;
1141       }
1142       /**
1143        * <code>required string expression = 1;</code>
1144        */
1145       public Builder setExpressionBytes(
1146           com.google.protobuf.ByteString value) {
1147         if (value == null) {
1148           throw new NullPointerException();
1149         }
1150         bitField0_ |= 0x00000001;
1151         expression_ = value;
1152         onChanged();
1153         return this;
1154       }
1155 
1156       // @@protoc_insertion_point(builder_scope:hbase.pb.CellVisibility)
1157     }
1158 
1159     static {
1160       defaultInstance = new CellVisibility(true);
1161       defaultInstance.initFields();
1162     }
1163 
1164     // @@protoc_insertion_point(class_scope:hbase.pb.CellVisibility)
1165   }
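  // A minimal usage sketch, not part of the generated source: expression is a
  // required field, so build() fails on a builder that never set it. The
  // expression string is only illustrative.
  //
  //   ClientProtos.CellVisibility vis = ClientProtos.CellVisibility.newBuilder()
  //       .setExpression("secret & !probationary")
  //       .build();
  //   // Omitting setExpression(...) makes isInitialized() return false and
  //   // build() throw an UninitializedMessageException.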
1166 
1167   public interface ColumnOrBuilder
1168       extends com.google.protobuf.MessageOrBuilder {
1169 
1170     // required bytes family = 1;
1171     /**
1172      * <code>required bytes family = 1;</code>
1173      */
1174     boolean hasFamily();
1175     /**
1176      * <code>required bytes family = 1;</code>
1177      */
1178     com.google.protobuf.ByteString getFamily();
1179 
1180     // repeated bytes qualifier = 2;
1181     /**
1182      * <code>repeated bytes qualifier = 2;</code>
1183      */
1184     java.util.List<com.google.protobuf.ByteString> getQualifierList();
1185     /**
1186      * <code>repeated bytes qualifier = 2;</code>
1187      */
1188     int getQualifierCount();
1189     /**
1190      * <code>repeated bytes qualifier = 2;</code>
1191      */
1192     com.google.protobuf.ByteString getQualifier(int index);
1193   }
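  // A minimal usage sketch, not part of the generated source: a Column pairs a
  // required family with a list of qualifiers, using the same builder pattern
  // as the messages above. The family and qualifier names are only illustrative.
  //
  //   ClientProtos.Column col = ClientProtos.Column.newBuilder()
  //       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("info"))
  //       .addQualifier(com.google.protobuf.ByteString.copyFromUtf8("name"))
  //       .addQualifier(com.google.protobuf.ByteString.copyFromUtf8("age"))
  //       .build();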
1194   /**
1195    * Protobuf type {@code hbase.pb.Column}
1196    *
1197    * <pre>
1198    **
1199    * Container for a list of column qualifier names of a family.
1200    * </pre>
1201    */
1202   public static final class Column extends
1203       com.google.protobuf.GeneratedMessage
1204       implements ColumnOrBuilder {
1205     // Use Column.newBuilder() to construct.
1206     private Column(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1207       super(builder);
1208       this.unknownFields = builder.getUnknownFields();
1209     }
1210     private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1211 
1212     private static final Column defaultInstance;
1213     public static Column getDefaultInstance() {
1214       return defaultInstance;
1215     }
1216 
1217     public Column getDefaultInstanceForType() {
1218       return defaultInstance;
1219     }
1220 
1221     private final com.google.protobuf.UnknownFieldSet unknownFields;
1222     @java.lang.Override
1223     public final com.google.protobuf.UnknownFieldSet
1224         getUnknownFields() {
1225       return this.unknownFields;
1226     }
1227     private Column(
1228         com.google.protobuf.CodedInputStream input,
1229         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1230         throws com.google.protobuf.InvalidProtocolBufferException {
1231       initFields();
1232       int mutable_bitField0_ = 0;
1233       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1234           com.google.protobuf.UnknownFieldSet.newBuilder();
1235       try {
1236         boolean done = false;
1237         while (!done) {
1238           int tag = input.readTag();
1239           switch (tag) {
1240             case 0:
1241               done = true;
1242               break;
1243             default: {
1244               if (!parseUnknownField(input, unknownFields,
1245                                      extensionRegistry, tag)) {
1246                 done = true;
1247               }
1248               break;
1249             }
1250             case 10: {
1251               bitField0_ |= 0x00000001;
1252               family_ = input.readBytes();
1253               break;
1254             }
1255             case 18: {
1256               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
1257                 qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
1258                 mutable_bitField0_ |= 0x00000002;
1259               }
1260               qualifier_.add(input.readBytes());
1261               break;
1262             }
1263           }
1264         }
1265       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1266         throw e.setUnfinishedMessage(this);
1267       } catch (java.io.IOException e) {
1268         throw new com.google.protobuf.InvalidProtocolBufferException(
1269             e.getMessage()).setUnfinishedMessage(this);
1270       } finally {
1271         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
1272           qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
1273         }
1274         this.unknownFields = unknownFields.build();
1275         makeExtensionsImmutable();
1276       }
1277     }
1278     public static final com.google.protobuf.Descriptors.Descriptor
1279         getDescriptor() {
1280       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor;
1281     }
1282 
1283     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1284         internalGetFieldAccessorTable() {
1285       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable
1286           .ensureFieldAccessorsInitialized(
1287               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
1288     }
1289 
1290     public static com.google.protobuf.Parser<Column> PARSER =
1291         new com.google.protobuf.AbstractParser<Column>() {
1292       public Column parsePartialFrom(
1293           com.google.protobuf.CodedInputStream input,
1294           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1295           throws com.google.protobuf.InvalidProtocolBufferException {
1296         return new Column(input, extensionRegistry);
1297       }
1298     };
1299 
1300     @java.lang.Override
1301     public com.google.protobuf.Parser<Column> getParserForType() {
1302       return PARSER;
1303     }
1304 
1305     private int bitField0_;
1306     // required bytes family = 1;
1307     public static final int FAMILY_FIELD_NUMBER = 1;
1308     private com.google.protobuf.ByteString family_;
1309     /**
1310      * <code>required bytes family = 1;</code>
1311      */
1312     public boolean hasFamily() {
1313       return ((bitField0_ & 0x00000001) == 0x00000001);
1314     }
1315     /**
1316      * <code>required bytes family = 1;</code>
1317      */
1318     public com.google.protobuf.ByteString getFamily() {
1319       return family_;
1320     }
1321 
1322     // repeated bytes qualifier = 2;
1323     public static final int QUALIFIER_FIELD_NUMBER = 2;
1324     private java.util.List<com.google.protobuf.ByteString> qualifier_;
1325     /**
1326      * <code>repeated bytes qualifier = 2;</code>
1327      */
1328     public java.util.List<com.google.protobuf.ByteString>
1329         getQualifierList() {
1330       return qualifier_;
1331     }
1332     /**
1333      * <code>repeated bytes qualifier = 2;</code>
1334      */
1335     public int getQualifierCount() {
1336       return qualifier_.size();
1337     }
1338     /**
1339      * <code>repeated bytes qualifier = 2;</code>
1340      */
1341     public com.google.protobuf.ByteString getQualifier(int index) {
1342       return qualifier_.get(index);
1343     }
1344 
1345     private void initFields() {
1346       family_ = com.google.protobuf.ByteString.EMPTY;
1347       qualifier_ = java.util.Collections.emptyList();
1348     }
1349     private byte memoizedIsInitialized = -1;
1350     public final boolean isInitialized() {
1351       byte isInitialized = memoizedIsInitialized;
1352       if (isInitialized != -1) return isInitialized == 1;
1353 
1354       if (!hasFamily()) {
1355         memoizedIsInitialized = 0;
1356         return false;
1357       }
1358       memoizedIsInitialized = 1;
1359       return true;
1360     }
1361 
1362     public void writeTo(com.google.protobuf.CodedOutputStream output)
1363                         throws java.io.IOException {
1364       getSerializedSize();
1365       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1366         output.writeBytes(1, family_);
1367       }
1368       for (int i = 0; i < qualifier_.size(); i++) {
1369         output.writeBytes(2, qualifier_.get(i));
1370       }
1371       getUnknownFields().writeTo(output);
1372     }
1373 
1374     private int memoizedSerializedSize = -1;
1375     public int getSerializedSize() {
1376       int size = memoizedSerializedSize;
1377       if (size != -1) return size;
1378 
1379       size = 0;
1380       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1381         size += com.google.protobuf.CodedOutputStream
1382           .computeBytesSize(1, family_);
1383       }
1384       {
1385         int dataSize = 0;
1386         for (int i = 0; i < qualifier_.size(); i++) {
1387           dataSize += com.google.protobuf.CodedOutputStream
1388             .computeBytesSizeNoTag(qualifier_.get(i));
1389         }
1390         size += dataSize;
1391         size += 1 * getQualifierList().size();
1392       }
1393       size += getUnknownFields().getSerializedSize();
1394       memoizedSerializedSize = size;
1395       return size;
1396     }
1397 
1398     private static final long serialVersionUID = 0L;
1399     @java.lang.Override
1400     protected java.lang.Object writeReplace()
1401         throws java.io.ObjectStreamException {
1402       return super.writeReplace();
1403     }
1404 
1405     @java.lang.Override
1406     public boolean equals(final java.lang.Object obj) {
1407       if (obj == this) {
1408        return true;
1409       }
1410       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) {
1411         return super.equals(obj);
1412       }
1413       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj;
1414 
1415       boolean result = true;
1416       result = result && (hasFamily() == other.hasFamily());
1417       if (hasFamily()) {
1418         result = result && getFamily()
1419             .equals(other.getFamily());
1420       }
1421       result = result && getQualifierList()
1422           .equals(other.getQualifierList());
1423       result = result &&
1424           getUnknownFields().equals(other.getUnknownFields());
1425       return result;
1426     }
1427 
1428     private int memoizedHashCode = 0;
1429     @java.lang.Override
1430     public int hashCode() {
1431       if (memoizedHashCode != 0) {
1432         return memoizedHashCode;
1433       }
1434       int hash = 41;
1435       hash = (19 * hash) + getDescriptorForType().hashCode();
1436       if (hasFamily()) {
1437         hash = (37 * hash) + FAMILY_FIELD_NUMBER;
1438         hash = (53 * hash) + getFamily().hashCode();
1439       }
1440       if (getQualifierCount() > 0) {
1441         hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
1442         hash = (53 * hash) + getQualifierList().hashCode();
1443       }
1444       hash = (29 * hash) + getUnknownFields().hashCode();
1445       memoizedHashCode = hash;
1446       return hash;
1447     }
1448 
1449     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1450         com.google.protobuf.ByteString data)
1451         throws com.google.protobuf.InvalidProtocolBufferException {
1452       return PARSER.parseFrom(data);
1453     }
1454     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1455         com.google.protobuf.ByteString data,
1456         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1457         throws com.google.protobuf.InvalidProtocolBufferException {
1458       return PARSER.parseFrom(data, extensionRegistry);
1459     }
1460     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data)
1461         throws com.google.protobuf.InvalidProtocolBufferException {
1462       return PARSER.parseFrom(data);
1463     }
1464     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1465         byte[] data,
1466         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1467         throws com.google.protobuf.InvalidProtocolBufferException {
1468       return PARSER.parseFrom(data, extensionRegistry);
1469     }
1470     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input)
1471         throws java.io.IOException {
1472       return PARSER.parseFrom(input);
1473     }
1474     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1475         java.io.InputStream input,
1476         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1477         throws java.io.IOException {
1478       return PARSER.parseFrom(input, extensionRegistry);
1479     }
1480     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input)
1481         throws java.io.IOException {
1482       return PARSER.parseDelimitedFrom(input);
1483     }
1484     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(
1485         java.io.InputStream input,
1486         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1487         throws java.io.IOException {
1488       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1489     }
1490     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1491         com.google.protobuf.CodedInputStream input)
1492         throws java.io.IOException {
1493       return PARSER.parseFrom(input);
1494     }
1495     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1496         com.google.protobuf.CodedInputStream input,
1497         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1498         throws java.io.IOException {
1499       return PARSER.parseFrom(input, extensionRegistry);
1500     }
1501 
1502     public static Builder newBuilder() { return Builder.create(); }
1503     public Builder newBuilderForType() { return newBuilder(); }
1504     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) {
1505       return newBuilder().mergeFrom(prototype);
1506     }
1507     public Builder toBuilder() { return newBuilder(this); }
1508 
1509     @java.lang.Override
1510     protected Builder newBuilderForType(
1511         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1512       Builder builder = new Builder(parent);
1513       return builder;
1514     }
1515     /**
1516      * Protobuf type {@code hbase.pb.Column}
1517      *
1518      * <pre>
1519      **
1520      * Container for a list of column qualifier names of a family.
1521      * </pre>
1522      */
1523     public static final class Builder extends
1524         com.google.protobuf.GeneratedMessage.Builder<Builder>
1525        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder {
1526       public static final com.google.protobuf.Descriptors.Descriptor
1527           getDescriptor() {
1528         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor;
1529       }
1530 
1531       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1532           internalGetFieldAccessorTable() {
1533         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable
1534             .ensureFieldAccessorsInitialized(
1535                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
1536       }
1537 
1538       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder()
1539       private Builder() {
1540         maybeForceBuilderInitialization();
1541       }
1542 
1543       private Builder(
1544           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1545         super(parent);
1546         maybeForceBuilderInitialization();
1547       }
1548       private void maybeForceBuilderInitialization() {
1549         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1550         }
1551       }
1552       private static Builder create() {
1553         return new Builder();
1554       }
1555 
1556       public Builder clear() {
1557         super.clear();
1558         family_ = com.google.protobuf.ByteString.EMPTY;
1559         bitField0_ = (bitField0_ & ~0x00000001);
1560         qualifier_ = java.util.Collections.emptyList();
1561         bitField0_ = (bitField0_ & ~0x00000002);
1562         return this;
1563       }
1564 
1565       public Builder clone() {
1566         return create().mergeFrom(buildPartial());
1567       }
1568 
1569       public com.google.protobuf.Descriptors.Descriptor
1570           getDescriptorForType() {
1571         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor;
1572       }
1573 
1574       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() {
1575         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance();
1576       }
1577 
1578       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() {
1579         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial();
1580         if (!result.isInitialized()) {
1581           throw newUninitializedMessageException(result);
1582         }
1583         return result;
1584       }
1585 
1586       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() {
1587         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this);
1588         int from_bitField0_ = bitField0_;
1589         int to_bitField0_ = 0;
1590         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1591           to_bitField0_ |= 0x00000001;
1592         }
1593         result.family_ = family_;
1594         if (((bitField0_ & 0x00000002) == 0x00000002)) {
1595           qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
1596           bitField0_ = (bitField0_ & ~0x00000002);
1597         }
1598         result.qualifier_ = qualifier_;
1599         result.bitField0_ = to_bitField0_;
1600         onBuilt();
1601         return result;
1602       }
1603 
1604       public Builder mergeFrom(com.google.protobuf.Message other) {
1605         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) {
1606           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other);
1607         } else {
1608           super.mergeFrom(other);
1609           return this;
1610         }
1611       }
1612 
1613       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) {
1614         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this;
1615         if (other.hasFamily()) {
1616           setFamily(other.getFamily());
1617         }
1618         if (!other.qualifier_.isEmpty()) {
1619           if (qualifier_.isEmpty()) {
1620             qualifier_ = other.qualifier_;
1621             bitField0_ = (bitField0_ & ~0x00000002);
1622           } else {
1623             ensureQualifierIsMutable();
1624             qualifier_.addAll(other.qualifier_);
1625           }
1626           onChanged();
1627         }
1628         this.mergeUnknownFields(other.getUnknownFields());
1629         return this;
1630       }
1631 
1632       public final boolean isInitialized() {
1633         if (!hasFamily()) {
1634           
1635           return false;
1636         }
1637         return true;
1638       }
1639 
1640       public Builder mergeFrom(
1641           com.google.protobuf.CodedInputStream input,
1642           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1643           throws java.io.IOException {
1644         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parsedMessage = null;
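        // Parse into a standalone message first; whatever was read successfully is
        // merged into this builder in the finally block, even when parsing fails
        // partway (the partial message travels on the thrown exception).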
1645         try {
1646           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1647         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1648           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage();
1649           throw e;
1650         } finally {
1651           if (parsedMessage != null) {
1652             mergeFrom(parsedMessage);
1653           }
1654         }
1655         return this;
1656       }
1657       private int bitField0_;
1658 
1659       // required bytes family = 1;
1660       private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
1661       /**
1662        * <code>required bytes family = 1;</code>
1663        */
1664       public boolean hasFamily() {
1665         return ((bitField0_ & 0x00000001) == 0x00000001);
1666       }
1667       /**
1668        * <code>required bytes family = 1;</code>
1669        */
1670       public com.google.protobuf.ByteString getFamily() {
1671         return family_;
1672       }
1673       /**
1674        * <code>required bytes family = 1;</code>
1675        */
1676       public Builder setFamily(com.google.protobuf.ByteString value) {
1677         if (value == null) {
1678           throw new NullPointerException();
1679         }
1680         bitField0_ |= 0x00000001;
1681         family_ = value;
1682         onChanged();
1683         return this;
1684       }
1685       /**
1686        * <code>required bytes family = 1;</code>
1687        */
1688       public Builder clearFamily() {
1689         bitField0_ = (bitField0_ & ~0x00000001);
1690         family_ = getDefaultInstance().getFamily();
1691         onChanged();
1692         return this;
1693       }
1694 
1695       // repeated bytes qualifier = 2;
1696       private java.util.List<com.google.protobuf.ByteString> qualifier_ = java.util.Collections.emptyList();
1697       private void ensureQualifierIsMutable() {
1698         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
1699           qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifier_);
1700           bitField0_ |= 0x00000002;
1701          }
1702       }
1703       /**
1704        * <code>repeated bytes qualifier = 2;</code>
1705        */
1706       public java.util.List<com.google.protobuf.ByteString>
1707           getQualifierList() {
1708         return java.util.Collections.unmodifiableList(qualifier_);
1709       }
1710       /**
1711        * <code>repeated bytes qualifier = 2;</code>
1712        */
1713       public int getQualifierCount() {
1714         return qualifier_.size();
1715       }
1716       /**
1717        * <code>repeated bytes qualifier = 2;</code>
1718        */
1719       public com.google.protobuf.ByteString getQualifier(int index) {
1720         return qualifier_.get(index);
1721       }
1722       /**
1723        * <code>repeated bytes qualifier = 2;</code>
1724        */
1725       public Builder setQualifier(
1726           int index, com.google.protobuf.ByteString value) {
1727         if (value == null) {
1728           throw new NullPointerException();
1729         }
1730         ensureQualifierIsMutable();
1731         qualifier_.set(index, value);
1732         onChanged();
1733         return this;
1734       }
1735       /**
1736        * <code>repeated bytes qualifier = 2;</code>
1737        */
1738       public Builder addQualifier(com.google.protobuf.ByteString value) {
1739         if (value == null) {
1740           throw new NullPointerException();
1741         }
1742         ensureQualifierIsMutable();
1743         qualifier_.add(value);
1744         onChanged();
1745         return this;
1746       }
1747       /**
1748        * <code>repeated bytes qualifier = 2;</code>
1749        */
1750       public Builder addAllQualifier(
1751           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
1752         ensureQualifierIsMutable();
1753         super.addAll(values, qualifier_);
1754         onChanged();
1755         return this;
1756       }
1757       /**
1758        * <code>repeated bytes qualifier = 2;</code>
1759        */
1760       public Builder clearQualifier() {
1761         qualifier_ = java.util.Collections.emptyList();
1762         bitField0_ = (bitField0_ & ~0x00000002);
1763         onChanged();
1764         return this;
1765       }
1766 
1767       // @@protoc_insertion_point(builder_scope:hbase.pb.Column)
1768     }
1769 
1770     static {
1771       defaultInstance = new Column(true);
1772       defaultInstance.initFields();
1773     }
1774 
1775     // @@protoc_insertion_point(class_scope:hbase.pb.Column)
1776   }
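  // Usage sketch (illustrative only, not part of the generated code): how a caller
  // might assemble a Column for family "cf" with two qualifiers using the Builder
  // above. The family and qualifier values here are made-up placeholders.
  //
  //   ClientProtos.Column column = ClientProtos.Column.newBuilder()
  //       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
  //       .addQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
  //       .addQualifier(com.google.protobuf.ByteString.copyFromUtf8("q2"))
  //       .build();   // build() throws if the required 'family' field is unset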
1777 
1778   public interface GetOrBuilder
1779       extends com.google.protobuf.MessageOrBuilder {
1780 
1781     // required bytes row = 1;
1782     /**
1783      * <code>required bytes row = 1;</code>
1784      */
1785     boolean hasRow();
1786     /**
1787      * <code>required bytes row = 1;</code>
1788      */
1789     com.google.protobuf.ByteString getRow();
1790 
1791     // repeated .hbase.pb.Column column = 2;
1792     /**
1793      * <code>repeated .hbase.pb.Column column = 2;</code>
1794      */
1795     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> 
1796         getColumnList();
1797     /**
1798      * <code>repeated .hbase.pb.Column column = 2;</code>
1799      */
1800     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
1801     /**
1802      * <code>repeated .hbase.pb.Column column = 2;</code>
1803      */
1804     int getColumnCount();
1805     /**
1806      * <code>repeated .hbase.pb.Column column = 2;</code>
1807      */
1808     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
1809         getColumnOrBuilderList();
1810     /**
1811      * <code>repeated .hbase.pb.Column column = 2;</code>
1812      */
1813     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
1814         int index);
1815 
1816     // repeated .hbase.pb.NameBytesPair attribute = 3;
1817     /**
1818      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
1819      */
1820     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> 
1821         getAttributeList();
1822     /**
1823      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
1824      */
1825     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
1826     /**
1827      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
1828      */
1829     int getAttributeCount();
1830     /**
1831      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
1832      */
1833     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
1834         getAttributeOrBuilderList();
1835     /**
1836      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
1837      */
1838     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
1839         int index);
1840 
1841     // optional .hbase.pb.Filter filter = 4;
1842     /**
1843      * <code>optional .hbase.pb.Filter filter = 4;</code>
1844      */
1845     boolean hasFilter();
1846     /**
1847      * <code>optional .hbase.pb.Filter filter = 4;</code>
1848      */
1849     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
1850     /**
1851      * <code>optional .hbase.pb.Filter filter = 4;</code>
1852      */
1853     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
1854 
1855     // optional .hbase.pb.TimeRange time_range = 5;
1856     /**
1857      * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
1858      */
1859     boolean hasTimeRange();
1860     /**
1861      * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
1862      */
1863     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
1864     /**
1865      * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
1866      */
1867     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
1868 
1869     // optional uint32 max_versions = 6 [default = 1];
1870     /**
1871      * <code>optional uint32 max_versions = 6 [default = 1];</code>
1872      */
1873     boolean hasMaxVersions();
1874     /**
1875      * <code>optional uint32 max_versions = 6 [default = 1];</code>
1876      */
1877     int getMaxVersions();
1878 
1879     // optional bool cache_blocks = 7 [default = true];
1880     /**
1881      * <code>optional bool cache_blocks = 7 [default = true];</code>
1882      */
1883     boolean hasCacheBlocks();
1884     /**
1885      * <code>optional bool cache_blocks = 7 [default = true];</code>
1886      */
1887     boolean getCacheBlocks();
1888 
1889     // optional uint32 store_limit = 8;
1890     /**
1891      * <code>optional uint32 store_limit = 8;</code>
1892      */
1893     boolean hasStoreLimit();
1894     /**
1895      * <code>optional uint32 store_limit = 8;</code>
1896      */
1897     int getStoreLimit();
1898 
1899     // optional uint32 store_offset = 9;
1900     /**
1901      * <code>optional uint32 store_offset = 9;</code>
1902      */
1903     boolean hasStoreOffset();
1904     /**
1905      * <code>optional uint32 store_offset = 9;</code>
1906      */
1907     int getStoreOffset();
1908 
1909     // optional bool existence_only = 10 [default = false];
1910     /**
1911      * <code>optional bool existence_only = 10 [default = false];</code>
1912      *
1913      * <pre>
1914      * The result isn't asked for, just check for
1915      * the existence.
1916      * </pre>
1917      */
1918     boolean hasExistenceOnly();
1919     /**
1920      * <code>optional bool existence_only = 10 [default = false];</code>
1921      *
1922      * <pre>
1923      * The result isn't asked for, just check for
1924      * the existence.
1925      * </pre>
1926      */
1927     boolean getExistenceOnly();
1928 
1929     // optional .hbase.pb.Consistency consistency = 12 [default = STRONG];
1930     /**
1931      * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
1932      */
1933     boolean hasConsistency();
1934     /**
1935      * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
1936      */
1937     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
1938   }
1939   /**
1940    * Protobuf type {@code hbase.pb.Get}
1941    *
1942    * <pre>
1943    **
1944    * The protocol buffer version of Get.
1945    * Unless existence_only is specified, return all the requested data
1946    * for the row that matches exactly.
1947    * </pre>
1948    */
1949   public static final class Get extends
1950       com.google.protobuf.GeneratedMessage
1951       implements GetOrBuilder {
1952     // Use Get.newBuilder() to construct.
1953     private Get(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1954       super(builder);
1955       this.unknownFields = builder.getUnknownFields();
1956     }
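    // Usage sketch (illustrative only, not generated code): building a Get for one row,
    // restricted to a single column family, reading the latest version with TIMELINE
    // consistency. The row/family names are placeholders, and the setters assumed here
    // (setRow, addColumn, setMaxVersions, setConsistency) follow the standard
    // protoc-generated Builder pattern.
    //
    //   ClientProtos.Get get = ClientProtos.Get.newBuilder()
    //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
    //       .addColumn(ClientProtos.Column.newBuilder()
    //           .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
    //           .build())
    //       .setMaxVersions(1)
    //       .setConsistency(ClientProtos.Consistency.TIMELINE)
    //       .build();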
1957     private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1958 
1959     private static final Get defaultInstance;
1960     public static Get getDefaultInstance() {
1961       return defaultInstance;
1962     }
1963 
1964     public Get getDefaultInstanceForType() {
1965       return defaultInstance;
1966     }
1967 
1968     private final com.google.protobuf.UnknownFieldSet unknownFields;
1969     @java.lang.Override
1970     public final com.google.protobuf.UnknownFieldSet
1971         getUnknownFields() {
1972       return this.unknownFields;
1973     }
1974     private Get(
1975         com.google.protobuf.CodedInputStream input,
1976         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1977         throws com.google.protobuf.InvalidProtocolBufferException {
1978       initFields();
1979       int mutable_bitField0_ = 0;
1980       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1981           com.google.protobuf.UnknownFieldSet.newBuilder();
1982       try {
1983         boolean done = false;
1984         while (!done) {
1985           int tag = input.readTag();
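          // A tag packs (field_number << 3) | wire_type: e.g. 10 is field 1
          // (row, length-delimited), 96 is field 12 (consistency, varint);
          // tag 0 means end of input.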
1986           switch (tag) {
1987             case 0:
1988               done = true;
1989               break;
1990             default: {
1991               if (!parseUnknownField(input, unknownFields,
1992                                      extensionRegistry, tag)) {
1993                 done = true;
1994               }
1995               break;
1996             }
1997             case 10: {
1998               bitField0_ |= 0x00000001;
1999               row_ = input.readBytes();
2000               break;
2001             }
2002             case 18: {
2003               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
2004                 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
2005                 mutable_bitField0_ |= 0x00000002;
2006               }
2007               column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
2008               break;
2009             }
2010             case 26: {
2011               if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
2012                 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
2013                 mutable_bitField0_ |= 0x00000004;
2014               }
2015               attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
2016               break;
2017             }
2018             case 34: {
2019               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
2020               if (((bitField0_ & 0x00000002) == 0x00000002)) {
2021                 subBuilder = filter_.toBuilder();
2022               }
2023               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
2024               if (subBuilder != null) {
2025                 subBuilder.mergeFrom(filter_);
2026                 filter_ = subBuilder.buildPartial();
2027               }
2028               bitField0_ |= 0x00000002;
2029               break;
2030             }
2031             case 42: {
2032               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
2033               if (((bitField0_ & 0x00000004) == 0x00000004)) {
2034                 subBuilder = timeRange_.toBuilder();
2035               }
2036               timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
2037               if (subBuilder != null) {
2038                 subBuilder.mergeFrom(timeRange_);
2039                 timeRange_ = subBuilder.buildPartial();
2040               }
2041               bitField0_ |= 0x00000004;
2042               break;
2043             }
2044             case 48: {
2045               bitField0_ |= 0x00000008;
2046               maxVersions_ = input.readUInt32();
2047               break;
2048             }
2049             case 56: {
2050               bitField0_ |= 0x00000010;
2051               cacheBlocks_ = input.readBool();
2052               break;
2053             }
2054             case 64: {
2055               bitField0_ |= 0x00000020;
2056               storeLimit_ = input.readUInt32();
2057               break;
2058             }
2059             case 72: {
2060               bitField0_ |= 0x00000040;
2061               storeOffset_ = input.readUInt32();
2062               break;
2063             }
2064             case 80: {
2065               bitField0_ |= 0x00000080;
2066               existenceOnly_ = input.readBool();
2067               break;
2068             }
2069             case 96: {
2070               int rawValue = input.readEnum();
2071               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
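              // An enum number this build does not recognize is preserved as a
              // varint in unknownFields rather than dropped, so it survives
              // re-serialization.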
2072               if (value == null) {
2073                 unknownFields.mergeVarintField(12, rawValue);
2074               } else {
2075                 bitField0_ |= 0x00000100;
2076                 consistency_ = value;
2077               }
2078               break;
2079             }
2080           }
2081         }
2082       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2083         throw e.setUnfinishedMessage(this);
2084       } catch (java.io.IOException e) {
2085         throw new com.google.protobuf.InvalidProtocolBufferException(
2086             e.getMessage()).setUnfinishedMessage(this);
2087       } finally {
2088         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
2089           column_ = java.util.Collections.unmodifiableList(column_);
2090         }
2091         if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
2092           attribute_ = java.util.Collections.unmodifiableList(attribute_);
2093         }
2094         this.unknownFields = unknownFields.build();
2095         makeExtensionsImmutable();
2096       }
2097     }
2098     public static final com.google.protobuf.Descriptors.Descriptor
2099         getDescriptor() {
2100       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor;
2101     }
2102 
2103     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2104         internalGetFieldAccessorTable() {
2105       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable
2106           .ensureFieldAccessorsInitialized(
2107               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
2108     }
2109 
2110     public static com.google.protobuf.Parser<Get> PARSER =
2111         new com.google.protobuf.AbstractParser<Get>() {
2112       public Get parsePartialFrom(
2113           com.google.protobuf.CodedInputStream input,
2114           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2115           throws com.google.protobuf.InvalidProtocolBufferException {
2116         return new Get(input, extensionRegistry);
2117       }
2118     };
2119 
2120     @java.lang.Override
2121     public com.google.protobuf.Parser<Get> getParserForType() {
2122       return PARSER;
2123     }
2124 
2125     private int bitField0_;
2126     // required bytes row = 1;
2127     public static final int ROW_FIELD_NUMBER = 1;
2128     private com.google.protobuf.ByteString row_;
2129     /**
2130      * <code>required bytes row = 1;</code>
2131      */
2132     public boolean hasRow() {
2133       return ((bitField0_ & 0x00000001) == 0x00000001);
2134     }
2135     /**
2136      * <code>required bytes row = 1;</code>
2137      */
2138     public com.google.protobuf.ByteString getRow() {
2139       return row_;
2140     }
2141 
2142     // repeated .hbase.pb.Column column = 2;
2143     public static final int COLUMN_FIELD_NUMBER = 2;
2144     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
2145     /**
2146      * <code>repeated .hbase.pb.Column column = 2;</code>
2147      */
2148     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
2149       return column_;
2150     }
2151     /**
2152      * <code>repeated .hbase.pb.Column column = 2;</code>
2153      */
2154     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
2155         getColumnOrBuilderList() {
2156       return column_;
2157     }
2158     /**
2159      * <code>repeated .hbase.pb.Column column = 2;</code>
2160      */
2161     public int getColumnCount() {
2162       return column_.size();
2163     }
2164     /**
2165      * <code>repeated .hbase.pb.Column column = 2;</code>
2166      */
2167     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
2168       return column_.get(index);
2169     }
2170     /**
2171      * <code>repeated .hbase.pb.Column column = 2;</code>
2172      */
2173     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
2174         int index) {
2175       return column_.get(index);
2176     }
2177 
2178     // repeated .hbase.pb.NameBytesPair attribute = 3;
2179     public static final int ATTRIBUTE_FIELD_NUMBER = 3;
2180     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
2181     /**
2182      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
2183      */
2184     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
2185       return attribute_;
2186     }
2187     /**
2188      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
2189      */
2190     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
2191         getAttributeOrBuilderList() {
2192       return attribute_;
2193     }
2194     /**
2195      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
2196      */
2197     public int getAttributeCount() {
2198       return attribute_.size();
2199     }
2200     /**
2201      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
2202      */
2203     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
2204       return attribute_.get(index);
2205     }
2206     /**
2207      * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
2208      */
2209     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
2210         int index) {
2211       return attribute_.get(index);
2212     }
2213 
2214     // optional .hbase.pb.Filter filter = 4;
2215     public static final int FILTER_FIELD_NUMBER = 4;
2216     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
2217     /**
2218      * <code>optional .hbase.pb.Filter filter = 4;</code>
2219      */
2220     public boolean hasFilter() {
2221       return ((bitField0_ & 0x00000002) == 0x00000002);
2222     }
2223     /**
2224      * <code>optional .hbase.pb.Filter filter = 4;</code>
2225      */
2226     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
2227       return filter_;
2228     }
2229     /**
2230      * <code>optional .hbase.pb.Filter filter = 4;</code>
2231      */
2232     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
2233       return filter_;
2234     }
2235 
2236     // optional .hbase.pb.TimeRange time_range = 5;
2237     public static final int TIME_RANGE_FIELD_NUMBER = 5;
2238     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
2239     /**
2240      * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
2241      */
2242     public boolean hasTimeRange() {
2243       return ((bitField0_ & 0x00000004) == 0x00000004);
2244     }
2245     /**
2246      * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
2247      */
2248     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
2249       return timeRange_;
2250     }
2251     /**
2252      * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
2253      */
2254     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
2255       return timeRange_;
2256     }
2257 
2258     // optional uint32 max_versions = 6 [default = 1];
2259     public static final int MAX_VERSIONS_FIELD_NUMBER = 6;
2260     private int maxVersions_;
2261     /**
2262      * <code>optional uint32 max_versions = 6 [default = 1];</code>
2263      */
2264     public boolean hasMaxVersions() {
2265       return ((bitField0_ & 0x00000008) == 0x00000008);
2266     }
2267     /**
2268      * <code>optional uint32 max_versions = 6 [default = 1];</code>
2269      */
2270     public int getMaxVersions() {
2271       return maxVersions_;
2272     }
2273 
2274     // optional bool cache_blocks = 7 [default = true];
2275     public static final int CACHE_BLOCKS_FIELD_NUMBER = 7;
2276     private boolean cacheBlocks_;
2277     /**
2278      * <code>optional bool cache_blocks = 7 [default = true];</code>
2279      */
2280     public boolean hasCacheBlocks() {
2281       return ((bitField0_ & 0x00000010) == 0x00000010);
2282     }
2283     /**
2284      * <code>optional bool cache_blocks = 7 [default = true];</code>
2285      */
2286     public boolean getCacheBlocks() {
2287       return cacheBlocks_;
2288     }
2289 
2290     // optional uint32 store_limit = 8;
2291     public static final int STORE_LIMIT_FIELD_NUMBER = 8;
2292     private int storeLimit_;
2293     /**
2294      * <code>optional uint32 store_limit = 8;</code>
2295      */
2296     public boolean hasStoreLimit() {
2297       return ((bitField0_ & 0x00000020) == 0x00000020);
2298     }
2299     /**
2300      * <code>optional uint32 store_limit = 8;</code>
2301      */
2302     public int getStoreLimit() {
2303       return storeLimit_;
2304     }
2305 
2306     // optional uint32 store_offset = 9;
2307     public static final int STORE_OFFSET_FIELD_NUMBER = 9;
2308     private int storeOffset_;
2309     /**
2310      * <code>optional uint32 store_offset = 9;</code>
2311      */
2312     public boolean hasStoreOffset() {
2313       return ((bitField0_ & 0x00000040) == 0x00000040);
2314     }
2315     /**
2316      * <code>optional uint32 store_offset = 9;</code>
2317      */
2318     public int getStoreOffset() {
2319       return storeOffset_;
2320     }
2321 
2322     // optional bool existence_only = 10 [default = false];
2323     public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10;
2324     private boolean existenceOnly_;
2325     /**
2326      * <code>optional bool existence_only = 10 [default = false];</code>
2327      *
2328      * <pre>
2329      * The result isn't asked for, just check for
2330      * the existence.
2331      * </pre>
2332      */
2333     public boolean hasExistenceOnly() {
2334       return ((bitField0_ & 0x00000080) == 0x00000080);
2335     }
2336     /**
2337      * <code>optional bool existence_only = 10 [default = false];</code>
2338      *
2339      * <pre>
2340      * The result isn't asked for, just check for
2341      * the existence.
2342      * </pre>
2343      */
2344     public boolean getExistenceOnly() {
2345       return existenceOnly_;
2346     }
2347 
2348     // optional .hbase.pb.Consistency consistency = 12 [default = STRONG];
2349     public static final int CONSISTENCY_FIELD_NUMBER = 12;
2350     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
2351     /**
2352      * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
2353      */
2354     public boolean hasConsistency() {
2355       return ((bitField0_ & 0x00000100) == 0x00000100);
2356     }
2357     /**
2358      * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
2359      */
2360     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
2361       return consistency_;
2362     }
2363 
2364     private void initFields() {
2365       row_ = com.google.protobuf.ByteString.EMPTY;
2366       column_ = java.util.Collections.emptyList();
2367       attribute_ = java.util.Collections.emptyList();
2368       filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
2369       timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
2370       maxVersions_ = 1;
2371       cacheBlocks_ = true;
2372       storeLimit_ = 0;
2373       storeOffset_ = 0;
2374       existenceOnly_ = false;
2375       consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
2376     }
2377     private byte memoizedIsInitialized = -1;
2378     public final boolean isInitialized() {
2379       byte isInitialized = memoizedIsInitialized;
2380       if (isInitialized != -1) return isInitialized == 1;
2381 
2382       if (!hasRow()) {
2383         memoizedIsInitialized = 0;
2384         return false;
2385       }
2386       for (int i = 0; i < getColumnCount(); i++) {
2387         if (!getColumn(i).isInitialized()) {
2388           memoizedIsInitialized = 0;
2389           return false;
2390         }
2391       }
2392       for (int i = 0; i < getAttributeCount(); i++) {
2393         if (!getAttribute(i).isInitialized()) {
2394           memoizedIsInitialized = 0;
2395           return false;
2396         }
2397       }
2398       if (hasFilter()) {
2399         if (!getFilter().isInitialized()) {
2400           memoizedIsInitialized = 0;
2401           return false;
2402         }
2403       }
2404       memoizedIsInitialized = 1;
2405       return true;
2406     }
2407 
2408     public void writeTo(com.google.protobuf.CodedOutputStream output)
2409                         throws java.io.IOException {
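      // Compute (and memoize) sizes up front; the nested-message sizes cached here
      // back the length prefixes written by writeMessage() below.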
2410       getSerializedSize();
2411       if (((bitField0_ & 0x00000001) == 0x00000001)) {
2412         output.writeBytes(1, row_);
2413       }
2414       for (int i = 0; i < column_.size(); i++) {
2415         output.writeMessage(2, column_.get(i));
2416       }
2417       for (int i = 0; i < attribute_.size(); i++) {
2418         output.writeMessage(3, attribute_.get(i));
2419       }
2420       if (((bitField0_ & 0x00000002) == 0x00000002)) {
2421         output.writeMessage(4, filter_);
2422       }
2423       if (((bitField0_ & 0x00000004) == 0x00000004)) {
2424         output.writeMessage(5, timeRange_);
2425       }
2426       if (((bitField0_ & 0x00000008) == 0x00000008)) {
2427         output.writeUInt32(6, maxVersions_);
2428       }
2429       if (((bitField0_ & 0x00000010) == 0x00000010)) {
2430         output.writeBool(7, cacheBlocks_);
2431       }
2432       if (((bitField0_ & 0x00000020) == 0x00000020)) {
2433         output.writeUInt32(8, storeLimit_);
2434       }
2435       if (((bitField0_ & 0x00000040) == 0x00000040)) {
2436         output.writeUInt32(9, storeOffset_);
2437       }
2438       if (((bitField0_ & 0x00000080) == 0x00000080)) {
2439         output.writeBool(10, existenceOnly_);
2440       }
2441       if (((bitField0_ & 0x00000100) == 0x00000100)) {
2442         output.writeEnum(12, consistency_.getNumber());
2443       }
2444       getUnknownFields().writeTo(output);
2445     }
2446 
2447     private int memoizedSerializedSize = -1;
2448     public int getSerializedSize() {
2449       int size = memoizedSerializedSize;
2450       if (size != -1) return size;
2451 
2452       size = 0;
2453       if (((bitField0_ & 0x00000001) == 0x00000001)) {
2454         size += com.google.protobuf.CodedOutputStream
2455           .computeBytesSize(1, row_);
2456       }
2457       for (int i = 0; i < column_.size(); i++) {
2458         size += com.google.protobuf.CodedOutputStream
2459           .computeMessageSize(2, column_.get(i));
2460       }
2461       for (int i = 0; i < attribute_.size(); i++) {
2462         size += com.google.protobuf.CodedOutputStream
2463           .computeMessageSize(3, attribute_.get(i));
2464       }
2465       if (((bitField0_ & 0x00000002) == 0x00000002)) {
2466         size += com.google.protobuf.CodedOutputStream
2467           .computeMessageSize(4, filter_);
2468       }
2469       if (((bitField0_ & 0x00000004) == 0x00000004)) {
2470         size += com.google.protobuf.CodedOutputStream
2471           .computeMessageSize(5, timeRange_);
2472       }
2473       if (((bitField0_ & 0x00000008) == 0x00000008)) {
2474         size += com.google.protobuf.CodedOutputStream
2475           .computeUInt32Size(6, maxVersions_);
2476       }
2477       if (((bitField0_ & 0x00000010) == 0x00000010)) {
2478         size += com.google.protobuf.CodedOutputStream
2479           .computeBoolSize(7, cacheBlocks_);
2480       }
2481       if (((bitField0_ & 0x00000020) == 0x00000020)) {
2482         size += com.google.protobuf.CodedOutputStream
2483           .computeUInt32Size(8, storeLimit_);
2484       }
2485       if (((bitField0_ & 0x00000040) == 0x00000040)) {
2486         size += com.google.protobuf.CodedOutputStream
2487           .computeUInt32Size(9, storeOffset_);
2488       }
2489       if (((bitField0_ & 0x00000080) == 0x00000080)) {
2490         size += com.google.protobuf.CodedOutputStream
2491           .computeBoolSize(10, existenceOnly_);
2492       }
2493       if (((bitField0_ & 0x00000100) == 0x00000100)) {
2494         size += com.google.protobuf.CodedOutputStream
2495           .computeEnumSize(12, consistency_.getNumber());
2496       }
2497       size += getUnknownFields().getSerializedSize();
2498       memoizedSerializedSize = size;
2499       return size;
2500     }
2501 
2502     private static final long serialVersionUID = 0L;
2503     @java.lang.Override
2504     protected java.lang.Object writeReplace()
2505         throws java.io.ObjectStreamException {
2506       return super.writeReplace();
2507     }
2508 
2509     @java.lang.Override
2510     public boolean equals(final java.lang.Object obj) {
2511       if (obj == this) {
2512        return true;
2513       }
2514       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) {
2515         return super.equals(obj);
2516       }
2517       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj;
2518 
2519       boolean result = true;
2520       result = result && (hasRow() == other.hasRow());
2521       if (hasRow()) {
2522         result = result && getRow()
2523             .equals(other.getRow());
2524       }
2525       result = result && getColumnList()
2526           .equals(other.getColumnList());
2527       result = result && getAttributeList()
2528           .equals(other.getAttributeList());
2529       result = result && (hasFilter() == other.hasFilter());
2530       if (hasFilter()) {
2531         result = result && getFilter()
2532             .equals(other.getFilter());
2533       }
2534       result = result && (hasTimeRange() == other.hasTimeRange());
2535       if (hasTimeRange()) {
2536         result = result && getTimeRange()
2537             .equals(other.getTimeRange());
2538       }
2539       result = result && (hasMaxVersions() == other.hasMaxVersions());
2540       if (hasMaxVersions()) {
2541         result = result && (getMaxVersions()
2542             == other.getMaxVersions());
2543       }
2544       result = result && (hasCacheBlocks() == other.hasCacheBlocks());
2545       if (hasCacheBlocks()) {
2546         result = result && (getCacheBlocks()
2547             == other.getCacheBlocks());
2548       }
2549       result = result && (hasStoreLimit() == other.hasStoreLimit());
2550       if (hasStoreLimit()) {
2551         result = result && (getStoreLimit()
2552             == other.getStoreLimit());
2553       }
2554       result = result && (hasStoreOffset() == other.hasStoreOffset());
2555       if (hasStoreOffset()) {
2556         result = result && (getStoreOffset()
2557             == other.getStoreOffset());
2558       }
2559       result = result && (hasExistenceOnly() == other.hasExistenceOnly());
2560       if (hasExistenceOnly()) {
2561         result = result && (getExistenceOnly()
2562             == other.getExistenceOnly());
2563       }
2564       result = result && (hasConsistency() == other.hasConsistency());
2565       if (hasConsistency()) {
2566         result = result &&
2567             (getConsistency() == other.getConsistency());
2568       }
2569       result = result &&
2570           getUnknownFields().equals(other.getUnknownFields());
2571       return result;
2572     }
2573 
2574     private int memoizedHashCode = 0;
2575     @java.lang.Override
2576     public int hashCode() {
2577       if (memoizedHashCode != 0) {
2578         return memoizedHashCode;
2579       }
2580       int hash = 41;
2581       hash = (19 * hash) + getDescriptorForType().hashCode();
2582       if (hasRow()) {
2583         hash = (37 * hash) + ROW_FIELD_NUMBER;
2584         hash = (53 * hash) + getRow().hashCode();
2585       }
2586       if (getColumnCount() > 0) {
2587         hash = (37 * hash) + COLUMN_FIELD_NUMBER;
2588         hash = (53 * hash) + getColumnList().hashCode();
2589       }
2590       if (getAttributeCount() > 0) {
2591         hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
2592         hash = (53 * hash) + getAttributeList().hashCode();
2593       }
2594       if (hasFilter()) {
2595         hash = (37 * hash) + FILTER_FIELD_NUMBER;
2596         hash = (53 * hash) + getFilter().hashCode();
2597       }
2598       if (hasTimeRange()) {
2599         hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
2600         hash = (53 * hash) + getTimeRange().hashCode();
2601       }
2602       if (hasMaxVersions()) {
2603         hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
2604         hash = (53 * hash) + getMaxVersions();
2605       }
2606       if (hasCacheBlocks()) {
2607         hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
2608         hash = (53 * hash) + hashBoolean(getCacheBlocks());
2609       }
2610       if (hasStoreLimit()) {
2611         hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
2612         hash = (53 * hash) + getStoreLimit();
2613       }
2614       if (hasStoreOffset()) {
2615         hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
2616         hash = (53 * hash) + getStoreOffset();
2617       }
2618       if (hasExistenceOnly()) {
2619         hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER;
2620         hash = (53 * hash) + hashBoolean(getExistenceOnly());
2621       }
2622       if (hasConsistency()) {
2623         hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
2624         hash = (53 * hash) + hashEnum(getConsistency());
2625       }
2626       hash = (29 * hash) + getUnknownFields().hashCode();
2627       memoizedHashCode = hash;
2628       return hash;
2629     }
2630 
2631     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2632         com.google.protobuf.ByteString data)
2633         throws com.google.protobuf.InvalidProtocolBufferException {
2634       return PARSER.parseFrom(data);
2635     }
2636     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2637         com.google.protobuf.ByteString data,
2638         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2639         throws com.google.protobuf.InvalidProtocolBufferException {
2640       return PARSER.parseFrom(data, extensionRegistry);
2641     }
2642     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data)
2643         throws com.google.protobuf.InvalidProtocolBufferException {
2644       return PARSER.parseFrom(data);
2645     }
2646     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2647         byte[] data,
2648         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2649         throws com.google.protobuf.InvalidProtocolBufferException {
2650       return PARSER.parseFrom(data, extensionRegistry);
2651     }
2652     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input)
2653         throws java.io.IOException {
2654       return PARSER.parseFrom(input);
2655     }
2656     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2657         java.io.InputStream input,
2658         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2659         throws java.io.IOException {
2660       return PARSER.parseFrom(input, extensionRegistry);
2661     }
2662     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input)
2663         throws java.io.IOException {
2664       return PARSER.parseDelimitedFrom(input);
2665     }
2666     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(
2667         java.io.InputStream input,
2668         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2669         throws java.io.IOException {
2670       return PARSER.parseDelimitedFrom(input, extensionRegistry);
2671     }
2672     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2673         com.google.protobuf.CodedInputStream input)
2674         throws java.io.IOException {
2675       return PARSER.parseFrom(input);
2676     }
2677     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2678         com.google.protobuf.CodedInputStream input,
2679         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2680         throws java.io.IOException {
2681       return PARSER.parseFrom(input, extensionRegistry);
2682     }
2683 
2684     public static Builder newBuilder() { return Builder.create(); }
2685     public Builder newBuilderForType() { return newBuilder(); }
2686     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) {
2687       return newBuilder().mergeFrom(prototype);
2688     }
2689     public Builder toBuilder() { return newBuilder(this); }
2690 
2691     @java.lang.Override
2692     protected Builder newBuilderForType(
2693         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2694       Builder builder = new Builder(parent);
2695       return builder;
2696     }
2697     /**
2698      * Protobuf type {@code hbase.pb.Get}
2699      *
2700      * <pre>
2701      **
2702      * The protocol buffer version of Get.
2703      * Unless existence_only is specified, return all the requested data
2704      * for the row that matches exactly.
2705      * </pre>
2706      */
2707     public static final class Builder extends
2708         com.google.protobuf.GeneratedMessage.Builder<Builder>
2709        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder {
2710       public static final com.google.protobuf.Descriptors.Descriptor
2711           getDescriptor() {
2712         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor;
2713       }
2714 
2715       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2716           internalGetFieldAccessorTable() {
2717         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable
2718             .ensureFieldAccessorsInitialized(
2719                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
2720       }
2721 
2722       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder()
2723       private Builder() {
2724         maybeForceBuilderInitialization();
2725       }
2726 
2727       private Builder(
2728           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2729         super(parent);
2730         maybeForceBuilderInitialization();
2731       }
2732       private void maybeForceBuilderInitialization() {
2733         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2734           getColumnFieldBuilder();
2735           getAttributeFieldBuilder();
2736           getFilterFieldBuilder();
2737           getTimeRangeFieldBuilder();
2738         }
2739       }
2740       private static Builder create() {
2741         return new Builder();
2742       }
2743 
2744       public Builder clear() {
2745         super.clear();
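        // Restore every field to its proto-declared default; note the non-zero
        // defaults: max_versions = 1, cache_blocks = true, consistency = STRONG.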
2746         row_ = com.google.protobuf.ByteString.EMPTY;
2747         bitField0_ = (bitField0_ & ~0x00000001);
2748         if (columnBuilder_ == null) {
2749           column_ = java.util.Collections.emptyList();
2750           bitField0_ = (bitField0_ & ~0x00000002);
2751         } else {
2752           columnBuilder_.clear();
2753         }
2754         if (attributeBuilder_ == null) {
2755           attribute_ = java.util.Collections.emptyList();
2756           bitField0_ = (bitField0_ & ~0x00000004);
2757         } else {
2758           attributeBuilder_.clear();
2759         }
2760         if (filterBuilder_ == null) {
2761           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
2762         } else {
2763           filterBuilder_.clear();
2764         }
2765         bitField0_ = (bitField0_ & ~0x00000008);
2766         if (timeRangeBuilder_ == null) {
2767           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
2768         } else {
2769           timeRangeBuilder_.clear();
2770         }
2771         bitField0_ = (bitField0_ & ~0x00000010);
2772         maxVersions_ = 1;
2773         bitField0_ = (bitField0_ & ~0x00000020);
2774         cacheBlocks_ = true;
2775         bitField0_ = (bitField0_ & ~0x00000040);
2776         storeLimit_ = 0;
2777         bitField0_ = (bitField0_ & ~0x00000080);
2778         storeOffset_ = 0;
2779         bitField0_ = (bitField0_ & ~0x00000100);
2780         existenceOnly_ = false;
2781         bitField0_ = (bitField0_ & ~0x00000200);
2782         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
2783         bitField0_ = (bitField0_ & ~0x00000400);
2784         return this;
2785       }
2786 
2787       public Builder clone() {
2788         return create().mergeFrom(buildPartial());
2789       }
2790 
2791       public com.google.protobuf.Descriptors.Descriptor
2792           getDescriptorForType() {
2793         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor;
2794       }
2795 
2796       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() {
2797         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
2798       }
2799 
2800       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() {
2801         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial();
2802         if (!result.isInitialized()) {
2803           throw newUninitializedMessageException(result);
2804         }
2805         return result;
2806       }
2807 
2808       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() {
2809         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this);
2810         int from_bitField0_ = bitField0_;
2811         int to_bitField0_ = 0;
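        // Builder bits track every field, repeated ones included; the built message
        // keeps has-bits only for optional/required fields, so positions shift here
        // (e.g. the builder's 0x8 'filter' bit becomes the message's 0x2).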
2812         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2813           to_bitField0_ |= 0x00000001;
2814         }
2815         result.row_ = row_;
2816         if (columnBuilder_ == null) {
2817           if (((bitField0_ & 0x00000002) == 0x00000002)) {
2818             column_ = java.util.Collections.unmodifiableList(column_);
2819             bitField0_ = (bitField0_ & ~0x00000002);
2820           }
2821           result.column_ = column_;
2822         } else {
2823           result.column_ = columnBuilder_.build();
2824         }
2825         if (attributeBuilder_ == null) {
2826           if (((bitField0_ & 0x00000004) == 0x00000004)) {
2827             attribute_ = java.util.Collections.unmodifiableList(attribute_);
2828             bitField0_ = (bitField0_ & ~0x00000004);
2829           }
2830           result.attribute_ = attribute_;
2831         } else {
2832           result.attribute_ = attributeBuilder_.build();
2833         }
2834         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
2835           to_bitField0_ |= 0x00000002;
2836         }
2837         if (filterBuilder_ == null) {
2838           result.filter_ = filter_;
2839         } else {
2840           result.filter_ = filterBuilder_.build();
2841         }
2842         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
2843           to_bitField0_ |= 0x00000004;
2844         }
2845         if (timeRangeBuilder_ == null) {
2846           result.timeRange_ = timeRange_;
2847         } else {
2848           result.timeRange_ = timeRangeBuilder_.build();
2849         }
2850         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
2851           to_bitField0_ |= 0x00000008;
2852         }
2853         result.maxVersions_ = maxVersions_;
2854         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
2855           to_bitField0_ |= 0x00000010;
2856         }
2857         result.cacheBlocks_ = cacheBlocks_;
2858         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
2859           to_bitField0_ |= 0x00000020;
2860         }
2861         result.storeLimit_ = storeLimit_;
2862         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
2863           to_bitField0_ |= 0x00000040;
2864         }
2865         result.storeOffset_ = storeOffset_;
2866         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
2867           to_bitField0_ |= 0x00000080;
2868         }
2869         result.existenceOnly_ = existenceOnly_;
2870         if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
2871           to_bitField0_ |= 0x00000100;
2872         }
2873         result.consistency_ = consistency_;
2874         result.bitField0_ = to_bitField0_;
2875         onBuilt();
2876         return result;
2877       }
2878 
2879       public Builder mergeFrom(com.google.protobuf.Message other) {
2880         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) {
2881           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other);
2882         } else {
2883           super.mergeFrom(other);
2884           return this;
2885         }
2886       }
2887 
2888       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) {
2889         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this;
2890         if (other.hasRow()) {
2891           setRow(other.getRow());
2892         }
2893         if (columnBuilder_ == null) {
2894           if (!other.column_.isEmpty()) {
2895             if (column_.isEmpty()) {
2896               column_ = other.column_;
2897               bitField0_ = (bitField0_ & ~0x00000002);
2898             } else {
2899               ensureColumnIsMutable();
2900               column_.addAll(other.column_);
2901             }
2902             onChanged();
2903           }
2904         } else {
2905           if (!other.column_.isEmpty()) {
2906             if (columnBuilder_.isEmpty()) {
2907               columnBuilder_.dispose();
2908               columnBuilder_ = null;
2909               column_ = other.column_;
2910               bitField0_ = (bitField0_ & ~0x00000002);
2911               columnBuilder_ = 
2912                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
2913                    getColumnFieldBuilder() : null;
2914             } else {
2915               columnBuilder_.addAllMessages(other.column_);
2916             }
2917           }
2918         }
2919         if (attributeBuilder_ == null) {
2920           if (!other.attribute_.isEmpty()) {
2921             if (attribute_.isEmpty()) {
2922               attribute_ = other.attribute_;
2923               bitField0_ = (bitField0_ & ~0x00000004);
2924             } else {
2925               ensureAttributeIsMutable();
2926               attribute_.addAll(other.attribute_);
2927             }
2928             onChanged();
2929           }
2930         } else {
2931           if (!other.attribute_.isEmpty()) {
2932             if (attributeBuilder_.isEmpty()) {
2933               attributeBuilder_.dispose();
2934               attributeBuilder_ = null;
2935               attribute_ = other.attribute_;
2936               bitField0_ = (bitField0_ & ~0x00000004);
2937               attributeBuilder_ = 
2938                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
2939                    getAttributeFieldBuilder() : null;
2940             } else {
2941               attributeBuilder_.addAllMessages(other.attribute_);
2942             }
2943           }
2944         }
2945         if (other.hasFilter()) {
2946           mergeFilter(other.getFilter());
2947         }
2948         if (other.hasTimeRange()) {
2949           mergeTimeRange(other.getTimeRange());
2950         }
2951         if (other.hasMaxVersions()) {
2952           setMaxVersions(other.getMaxVersions());
2953         }
2954         if (other.hasCacheBlocks()) {
2955           setCacheBlocks(other.getCacheBlocks());
2956         }
2957         if (other.hasStoreLimit()) {
2958           setStoreLimit(other.getStoreLimit());
2959         }
2960         if (other.hasStoreOffset()) {
2961           setStoreOffset(other.getStoreOffset());
2962         }
2963         if (other.hasExistenceOnly()) {
2964           setExistenceOnly(other.getExistenceOnly());
2965         }
2966         if (other.hasConsistency()) {
2967           setConsistency(other.getConsistency());
2968         }
2969         this.mergeUnknownFields(other.getUnknownFields());
2970         return this;
2971       }
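Worth noting about the merge path above: singular fields from other replace or merge into this Builder only when other has them set, while the repeated column and attribute lists are appended rather than replaced. A minimal sketch of the resulting semantics, using hypothetical messages a and b (illustrative only, not part of the generated file):

// Illustrative sketch only -- not part of the generated source.
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class GetMergeSketch {
  public static void main(String[] args) {
    ClientProtos.Get a = ClientProtos.Get.newBuilder()
        .setRow(ByteString.copyFromUtf8("r1"))
        .setMaxVersions(2)
        .build();
    ClientProtos.Get b = ClientProtos.Get.newBuilder()
        .setRow(ByteString.copyFromUtf8("r2"))
        .setStoreLimit(5)
        .build();
    // Fields set in b overwrite a's values; fields b leaves unset keep a's values.
    ClientProtos.Get merged = a.toBuilder().mergeFrom(b).build();
    System.out.println(merged.getRow().toStringUtf8());  // r2
    System.out.println(merged.getMaxVersions());         // 2 (still from a)
    System.out.println(merged.getStoreLimit());          // 5 (taken from b)
  }
}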
2972 
2973       public final boolean isInitialized() {
2974         if (!hasRow()) {
2975           
2976           return false;
2977         }
2978         for (int i = 0; i < getColumnCount(); i++) {
2979           if (!getColumn(i).isInitialized()) {
2980             
2981             return false;
2982           }
2983         }
2984         for (int i = 0; i < getAttributeCount(); i++) {
2985           if (!getAttribute(i).isInitialized()) {
2986             
2987             return false;
2988           }
2989         }
2990         if (hasFilter()) {
2991           if (!getFilter().isInitialized()) {
2992             
2993             return false;
2994           }
2995         }
2996         return true;
2997       }
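Because row is the only required field of Get, the check above is what ultimately gates build(): isInitialized() returns false until setRow() has been called (and until any nested column, attribute, or filter messages are themselves initialized), and build() then throws via newUninitializedMessageException(). A small sketch of what a caller would see, assuming the standard protobuf runtime this generated code targets (illustrative only, not part of the generated file):

// Illustrative sketch only -- not part of the generated source.
import com.google.protobuf.UninitializedMessageException;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class GetValidationSketch {
  public static void main(String[] args) {
    ClientProtos.Get.Builder builder = ClientProtos.Get.newBuilder();
    // No row set yet, so the required-field check fails.
    System.out.println(builder.isInitialized());   // false
    try {
      builder.build();                             // rejects: required field "row" is missing
    } catch (UninitializedMessageException e) {
      System.out.println("build() rejected: " + e.getMessage());
    }
    // buildPartial() skips the check and returns a message with hasRow() == false.
    ClientProtos.Get partial = builder.buildPartial();
    System.out.println(partial.hasRow());          // false
  }
}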
2998 
2999       public Builder mergeFrom(
3000           com.google.protobuf.CodedInputStream input,
3001           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3002           throws java.io.IOException {
3003         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parsedMessage = null;
3004         try {
3005           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3006         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3007           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage();
3008           throw e;
3009         } finally {
3010           if (parsedMessage != null) {
3011             mergeFrom(parsedMessage);
3012           }
3013         }
3014         return this;
3015       }
3016       private int bitField0_;
3017 
3018       // required bytes row = 1;
3019       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
3020       /**
3021        * <code>required bytes row = 1;</code>
3022        */
3023       public boolean hasRow() {
3024         return ((bitField0_ & 0x00000001) == 0x00000001);
3025       }
3026       /**
3027        * <code>required bytes row = 1;</code>
3028        */
3029       public com.google.protobuf.ByteString getRow() {
3030         return row_;
3031       }
3032       /**
3033        * <code>required bytes row = 1;</code>
3034        */
3035       public Builder setRow(com.google.protobuf.ByteString value) {
3036         if (value == null) {
3037           throw new NullPointerException();
3038         }
3039         bitField0_ |= 0x00000001;
3040         row_ = value;
3041         onChanged();
3042         return this;
3043       }
3044       /**
3045        * <code>required bytes row = 1;</code>
3046        */
3047       public Builder clearRow() {
3048         bitField0_ = (bitField0_ & ~0x00000001);
3049         row_ = getDefaultInstance().getRow();
3050         onChanged();
3051         return this;
3052       }
3053 
3054       // repeated .hbase.pb.Column column = 2;
3055       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
3056         java.util.Collections.emptyList();
3057       private void ensureColumnIsMutable() {
3058         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
3059           column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
3060           bitField0_ |= 0x00000002;
3061          }
3062       }
3063 
3064       private com.google.protobuf.RepeatedFieldBuilder<
3065           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;
3066 
3067       /**
3068        * <code>repeated .hbase.pb.Column column = 2;</code>
3069        */
3070       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
3071         if (columnBuilder_ == null) {
3072           return java.util.Collections.unmodifiableList(column_);
3073         } else {
3074           return columnBuilder_.getMessageList();
3075         }
3076       }
3077       /**
3078        * <code>repeated .hbase.pb.Column column = 2;</code>
3079        */
3080       public int getColumnCount() {
3081         if (columnBuilder_ == null) {
3082           return column_.size();
3083         } else {
3084           return columnBuilder_.getCount();
3085         }
3086       }
3087       /**
3088        * <code>repeated .hbase.pb.Column column = 2;</code>
3089        */
3090       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
3091         if (columnBuilder_ == null) {
3092           return column_.get(index);
3093         } else {
3094           return columnBuilder_.getMessage(index);
3095         }
3096       }
3097       /**
3098        * <code>repeated .hbase.pb.Column column = 2;</code>
3099        */
3100       public Builder setColumn(
3101           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
3102         if (columnBuilder_ == null) {
3103           if (value == null) {
3104             throw new NullPointerException();
3105           }
3106           ensureColumnIsMutable();
3107           column_.set(index, value);
3108           onChanged();
3109         } else {
3110           columnBuilder_.setMessage(index, value);
3111         }
3112         return this;
3113       }
3114       /**
3115        * <code>repeated .hbase.pb.Column column = 2;</code>
3116        */
3117       public Builder setColumn(
3118           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3119         if (columnBuilder_ == null) {
3120           ensureColumnIsMutable();
3121           column_.set(index, builderForValue.build());
3122           onChanged();
3123         } else {
3124           columnBuilder_.setMessage(index, builderForValue.build());
3125         }
3126         return this;
3127       }
3128       /**
3129        * <code>repeated .hbase.pb.Column column = 2;</code>
3130        */
3131       public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
3132         if (columnBuilder_ == null) {
3133           if (value == null) {
3134             throw new NullPointerException();
3135           }
3136           ensureColumnIsMutable();
3137           column_.add(value);
3138           onChanged();
3139         } else {
3140           columnBuilder_.addMessage(value);
3141         }
3142         return this;
3143       }
3144       /**
3145        * <code>repeated .hbase.pb.Column column = 2;</code>
3146        */
3147       public Builder addColumn(
3148           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
3149         if (columnBuilder_ == null) {
3150           if (value == null) {
3151             throw new NullPointerException();
3152           }
3153           ensureColumnIsMutable();
3154           column_.add(index, value);
3155           onChanged();
3156         } else {
3157           columnBuilder_.addMessage(index, value);
3158         }
3159         return this;
3160       }
3161       /**
3162        * <code>repeated .hbase.pb.Column column = 2;</code>
3163        */
3164       public Builder addColumn(
3165           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3166         if (columnBuilder_ == null) {
3167           ensureColumnIsMutable();
3168           column_.add(builderForValue.build());
3169           onChanged();
3170         } else {
3171           columnBuilder_.addMessage(builderForValue.build());
3172         }
3173         return this;
3174       }
3175       /**
3176        * <code>repeated .hbase.pb.Column column = 2;</code>
3177        */
3178       public Builder addColumn(
3179           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3180         if (columnBuilder_ == null) {
3181           ensureColumnIsMutable();
3182           column_.add(index, builderForValue.build());
3183           onChanged();
3184         } else {
3185           columnBuilder_.addMessage(index, builderForValue.build());
3186         }
3187         return this;
3188       }
3189       /**
3190        * <code>repeated .hbase.pb.Column column = 2;</code>
3191        */
3192       public Builder addAllColumn(
3193           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
3194         if (columnBuilder_ == null) {
3195           ensureColumnIsMutable();
3196           super.addAll(values, column_);
3197           onChanged();
3198         } else {
3199           columnBuilder_.addAllMessages(values);
3200         }
3201         return this;
3202       }
3203       /**
3204        * <code>repeated .hbase.pb.Column column = 2;</code>
3205        */
3206       public Builder clearColumn() {
3207         if (columnBuilder_ == null) {
3208           column_ = java.util.Collections.emptyList();
3209           bitField0_ = (bitField0_ & ~0x00000002);
3210           onChanged();
3211         } else {
3212           columnBuilder_.clear();
3213         }
3214         return this;
3215       }
3216       /**
3217        * <code>repeated .hbase.pb.Column column = 2;</code>
3218        */
3219       public Builder removeColumn(int index) {
3220         if (columnBuilder_ == null) {
3221           ensureColumnIsMutable();
3222           column_.remove(index);
3223           onChanged();
3224         } else {
3225           columnBuilder_.remove(index);
3226         }
3227         return this;
3228       }
3229       /**
3230        * <code>repeated .hbase.pb.Column column = 2;</code>
3231        */
3232       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
3233           int index) {
3234         return getColumnFieldBuilder().getBuilder(index);
3235       }
3236       /**
3237        * <code>repeated .hbase.pb.Column column = 2;</code>
3238        */
3239       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
3240           int index) {
3241         if (columnBuilder_ == null) {
3242           return column_.get(index);  } else {
3243           return columnBuilder_.getMessageOrBuilder(index);
3244         }
3245       }
3246       /**
3247        * <code>repeated .hbase.pb.Column column = 2;</code>
3248        */
3249       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
3250            getColumnOrBuilderList() {
3251         if (columnBuilder_ != null) {
3252           return columnBuilder_.getMessageOrBuilderList();
3253         } else {
3254           return java.util.Collections.unmodifiableList(column_);
3255         }
3256       }
3257       /**
3258        * <code>repeated .hbase.pb.Column column = 2;</code>
3259        */
3260       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
3261         return getColumnFieldBuilder().addBuilder(
3262             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
3263       }
3264       /**
3265        * <code>repeated .hbase.pb.Column column = 2;</code>
3266        */
3267       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
3268           int index) {
3269         return getColumnFieldBuilder().addBuilder(
3270             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
3271       }
3272       /**
3273        * <code>repeated .hbase.pb.Column column = 2;</code>
3274        */
3275       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder> 
3276            getColumnBuilderList() {
3277         return getColumnFieldBuilder().getBuilderList();
3278       }
3279       private com.google.protobuf.RepeatedFieldBuilder<
3280           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
3281           getColumnFieldBuilder() {
3282         if (columnBuilder_ == null) {
3283           columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
3284               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
3285                   column_,
3286                   ((bitField0_ & 0x00000002) == 0x00000002),
3287                   getParentForChildren(),
3288                   isClean());
3289           column_ = null;
3290         }
3291         return columnBuilder_;
3292       }
3293 
3294       // repeated .hbase.pb.NameBytesPair attribute = 3;
3295       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
3296         java.util.Collections.emptyList();
3297       private void ensureAttributeIsMutable() {
3298         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
3299           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
3300           bitField0_ |= 0x00000004;
3301          }
3302       }
3303 
3304       private com.google.protobuf.RepeatedFieldBuilder<
3305           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
3306 
3307       /**
3308        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3309        */
3310       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
3311         if (attributeBuilder_ == null) {
3312           return java.util.Collections.unmodifiableList(attribute_);
3313         } else {
3314           return attributeBuilder_.getMessageList();
3315         }
3316       }
3317       /**
3318        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3319        */
3320       public int getAttributeCount() {
3321         if (attributeBuilder_ == null) {
3322           return attribute_.size();
3323         } else {
3324           return attributeBuilder_.getCount();
3325         }
3326       }
3327       /**
3328        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3329        */
3330       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
3331         if (attributeBuilder_ == null) {
3332           return attribute_.get(index);
3333         } else {
3334           return attributeBuilder_.getMessage(index);
3335         }
3336       }
3337       /**
3338        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3339        */
3340       public Builder setAttribute(
3341           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3342         if (attributeBuilder_ == null) {
3343           if (value == null) {
3344             throw new NullPointerException();
3345           }
3346           ensureAttributeIsMutable();
3347           attribute_.set(index, value);
3348           onChanged();
3349         } else {
3350           attributeBuilder_.setMessage(index, value);
3351         }
3352         return this;
3353       }
3354       /**
3355        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3356        */
3357       public Builder setAttribute(
3358           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3359         if (attributeBuilder_ == null) {
3360           ensureAttributeIsMutable();
3361           attribute_.set(index, builderForValue.build());
3362           onChanged();
3363         } else {
3364           attributeBuilder_.setMessage(index, builderForValue.build());
3365         }
3366         return this;
3367       }
3368       /**
3369        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3370        */
3371       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3372         if (attributeBuilder_ == null) {
3373           if (value == null) {
3374             throw new NullPointerException();
3375           }
3376           ensureAttributeIsMutable();
3377           attribute_.add(value);
3378           onChanged();
3379         } else {
3380           attributeBuilder_.addMessage(value);
3381         }
3382         return this;
3383       }
3384       /**
3385        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3386        */
3387       public Builder addAttribute(
3388           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3389         if (attributeBuilder_ == null) {
3390           if (value == null) {
3391             throw new NullPointerException();
3392           }
3393           ensureAttributeIsMutable();
3394           attribute_.add(index, value);
3395           onChanged();
3396         } else {
3397           attributeBuilder_.addMessage(index, value);
3398         }
3399         return this;
3400       }
3401       /**
3402        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3403        */
3404       public Builder addAttribute(
3405           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3406         if (attributeBuilder_ == null) {
3407           ensureAttributeIsMutable();
3408           attribute_.add(builderForValue.build());
3409           onChanged();
3410         } else {
3411           attributeBuilder_.addMessage(builderForValue.build());
3412         }
3413         return this;
3414       }
3415       /**
3416        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3417        */
3418       public Builder addAttribute(
3419           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3420         if (attributeBuilder_ == null) {
3421           ensureAttributeIsMutable();
3422           attribute_.add(index, builderForValue.build());
3423           onChanged();
3424         } else {
3425           attributeBuilder_.addMessage(index, builderForValue.build());
3426         }
3427         return this;
3428       }
3429       /**
3430        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3431        */
3432       public Builder addAllAttribute(
3433           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
3434         if (attributeBuilder_ == null) {
3435           ensureAttributeIsMutable();
3436           super.addAll(values, attribute_);
3437           onChanged();
3438         } else {
3439           attributeBuilder_.addAllMessages(values);
3440         }
3441         return this;
3442       }
3443       /**
3444        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3445        */
3446       public Builder clearAttribute() {
3447         if (attributeBuilder_ == null) {
3448           attribute_ = java.util.Collections.emptyList();
3449           bitField0_ = (bitField0_ & ~0x00000004);
3450           onChanged();
3451         } else {
3452           attributeBuilder_.clear();
3453         }
3454         return this;
3455       }
3456       /**
3457        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3458        */
3459       public Builder removeAttribute(int index) {
3460         if (attributeBuilder_ == null) {
3461           ensureAttributeIsMutable();
3462           attribute_.remove(index);
3463           onChanged();
3464         } else {
3465           attributeBuilder_.remove(index);
3466         }
3467         return this;
3468       }
3469       /**
3470        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3471        */
3472       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
3473           int index) {
3474         return getAttributeFieldBuilder().getBuilder(index);
3475       }
3476       /**
3477        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3478        */
3479       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
3480           int index) {
3481         if (attributeBuilder_ == null) {
3482           return attribute_.get(index);  } else {
3483           return attributeBuilder_.getMessageOrBuilder(index);
3484         }
3485       }
3486       /**
3487        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3488        */
3489       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
3490            getAttributeOrBuilderList() {
3491         if (attributeBuilder_ != null) {
3492           return attributeBuilder_.getMessageOrBuilderList();
3493         } else {
3494           return java.util.Collections.unmodifiableList(attribute_);
3495         }
3496       }
3497       /**
3498        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3499        */
3500       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
3501         return getAttributeFieldBuilder().addBuilder(
3502             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
3503       }
3504       /**
3505        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3506        */
3507       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
3508           int index) {
3509         return getAttributeFieldBuilder().addBuilder(
3510             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
3511       }
3512       /**
3513        * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
3514        */
3515       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> 
3516            getAttributeBuilderList() {
3517         return getAttributeFieldBuilder().getBuilderList();
3518       }
3519       private com.google.protobuf.RepeatedFieldBuilder<
3520           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
3521           getAttributeFieldBuilder() {
3522         if (attributeBuilder_ == null) {
3523           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
3524               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
3525                   attribute_,
3526                   ((bitField0_ & 0x00000004) == 0x00000004),
3527                   getParentForChildren(),
3528                   isClean());
3529           attribute_ = null;
3530         }
3531         return attributeBuilder_;
3532       }
3533 
3534       // optional .hbase.pb.Filter filter = 4;
3535       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
3536       private com.google.protobuf.SingleFieldBuilder<
3537           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
3538       /**
3539        * <code>optional .hbase.pb.Filter filter = 4;</code>
3540        */
3541       public boolean hasFilter() {
3542         return ((bitField0_ & 0x00000008) == 0x00000008);
3543       }
3544       /**
3545        * <code>optional .hbase.pb.Filter filter = 4;</code>
3546        */
3547       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
3548         if (filterBuilder_ == null) {
3549           return filter_;
3550         } else {
3551           return filterBuilder_.getMessage();
3552         }
3553       }
3554       /**
3555        * <code>optional .hbase.pb.Filter filter = 4;</code>
3556        */
3557       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
3558         if (filterBuilder_ == null) {
3559           if (value == null) {
3560             throw new NullPointerException();
3561           }
3562           filter_ = value;
3563           onChanged();
3564         } else {
3565           filterBuilder_.setMessage(value);
3566         }
3567         bitField0_ |= 0x00000008;
3568         return this;
3569       }
3570       /**
3571        * <code>optional .hbase.pb.Filter filter = 4;</code>
3572        */
3573       public Builder setFilter(
3574           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
3575         if (filterBuilder_ == null) {
3576           filter_ = builderForValue.build();
3577           onChanged();
3578         } else {
3579           filterBuilder_.setMessage(builderForValue.build());
3580         }
3581         bitField0_ |= 0x00000008;
3582         return this;
3583       }
3584       /**
3585        * <code>optional .hbase.pb.Filter filter = 4;</code>
3586        */
3587       public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
3588         if (filterBuilder_ == null) {
3589           if (((bitField0_ & 0x00000008) == 0x00000008) &&
3590               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
3591             filter_ =
3592               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
3593           } else {
3594             filter_ = value;
3595           }
3596           onChanged();
3597         } else {
3598           filterBuilder_.mergeFrom(value);
3599         }
3600         bitField0_ |= 0x00000008;
3601         return this;
3602       }
3603       /**
3604        * <code>optional .hbase.pb.Filter filter = 4;</code>
3605        */
3606       public Builder clearFilter() {
3607         if (filterBuilder_ == null) {
3608           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
3609           onChanged();
3610         } else {
3611           filterBuilder_.clear();
3612         }
3613         bitField0_ = (bitField0_ & ~0x00000008);
3614         return this;
3615       }
3616       /**
3617        * <code>optional .hbase.pb.Filter filter = 4;</code>
3618        */
3619       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
3620         bitField0_ |= 0x00000008;
3621         onChanged();
3622         return getFilterFieldBuilder().getBuilder();
3623       }
3624       /**
3625        * <code>optional .hbase.pb.Filter filter = 4;</code>
3626        */
3627       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
3628         if (filterBuilder_ != null) {
3629           return filterBuilder_.getMessageOrBuilder();
3630         } else {
3631           return filter_;
3632         }
3633       }
3634       /**
3635        * <code>optional .hbase.pb.Filter filter = 4;</code>
3636        */
3637       private com.google.protobuf.SingleFieldBuilder<
3638           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
3639           getFilterFieldBuilder() {
3640         if (filterBuilder_ == null) {
3641           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3642               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
3643                   filter_,
3644                   getParentForChildren(),
3645                   isClean());
3646           filter_ = null;
3647         }
3648         return filterBuilder_;
3649       }
3650 
3651       // optional .hbase.pb.TimeRange time_range = 5;
3652       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
3653       private com.google.protobuf.SingleFieldBuilder<
3654           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
3655       /**
3656        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3657        */
3658       public boolean hasTimeRange() {
3659         return ((bitField0_ & 0x00000010) == 0x00000010);
3660       }
3661       /**
3662        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3663        */
3664       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
3665         if (timeRangeBuilder_ == null) {
3666           return timeRange_;
3667         } else {
3668           return timeRangeBuilder_.getMessage();
3669         }
3670       }
3671       /**
3672        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3673        */
3674       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
3675         if (timeRangeBuilder_ == null) {
3676           if (value == null) {
3677             throw new NullPointerException();
3678           }
3679           timeRange_ = value;
3680           onChanged();
3681         } else {
3682           timeRangeBuilder_.setMessage(value);
3683         }
3684         bitField0_ |= 0x00000010;
3685         return this;
3686       }
3687       /**
3688        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3689        */
3690       public Builder setTimeRange(
3691           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
3692         if (timeRangeBuilder_ == null) {
3693           timeRange_ = builderForValue.build();
3694           onChanged();
3695         } else {
3696           timeRangeBuilder_.setMessage(builderForValue.build());
3697         }
3698         bitField0_ |= 0x00000010;
3699         return this;
3700       }
3701       /**
3702        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3703        */
3704       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
3705         if (timeRangeBuilder_ == null) {
3706           if (((bitField0_ & 0x00000010) == 0x00000010) &&
3707               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
3708             timeRange_ =
3709               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
3710           } else {
3711             timeRange_ = value;
3712           }
3713           onChanged();
3714         } else {
3715           timeRangeBuilder_.mergeFrom(value);
3716         }
3717         bitField0_ |= 0x00000010;
3718         return this;
3719       }
3720       /**
3721        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3722        */
3723       public Builder clearTimeRange() {
3724         if (timeRangeBuilder_ == null) {
3725           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
3726           onChanged();
3727         } else {
3728           timeRangeBuilder_.clear();
3729         }
3730         bitField0_ = (bitField0_ & ~0x00000010);
3731         return this;
3732       }
3733       /**
3734        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3735        */
3736       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
3737         bitField0_ |= 0x00000010;
3738         onChanged();
3739         return getTimeRangeFieldBuilder().getBuilder();
3740       }
3741       /**
3742        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3743        */
3744       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
3745         if (timeRangeBuilder_ != null) {
3746           return timeRangeBuilder_.getMessageOrBuilder();
3747         } else {
3748           return timeRange_;
3749         }
3750       }
3751       /**
3752        * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
3753        */
3754       private com.google.protobuf.SingleFieldBuilder<
3755           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> 
3756           getTimeRangeFieldBuilder() {
3757         if (timeRangeBuilder_ == null) {
3758           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3759               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
3760                   timeRange_,
3761                   getParentForChildren(),
3762                   isClean());
3763           timeRange_ = null;
3764         }
3765         return timeRangeBuilder_;
3766       }
3767 
3768       // optional uint32 max_versions = 6 [default = 1];
3769       private int maxVersions_ = 1;
3770       /**
3771        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3772        */
3773       public boolean hasMaxVersions() {
3774         return ((bitField0_ & 0x00000020) == 0x00000020);
3775       }
3776       /**
3777        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3778        */
3779       public int getMaxVersions() {
3780         return maxVersions_;
3781       }
3782       /**
3783        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3784        */
3785       public Builder setMaxVersions(int value) {
3786         bitField0_ |= 0x00000020;
3787         maxVersions_ = value;
3788         onChanged();
3789         return this;
3790       }
3791       /**
3792        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3793        */
3794       public Builder clearMaxVersions() {
3795         bitField0_ = (bitField0_ & ~0x00000020);
3796         maxVersions_ = 1;
3797         onChanged();
3798         return this;
3799       }
3800 
3801       // optional bool cache_blocks = 7 [default = true];
3802       private boolean cacheBlocks_ = true;
3803       /**
3804        * <code>optional bool cache_blocks = 7 [default = true];</code>
3805        */
3806       public boolean hasCacheBlocks() {
3807         return ((bitField0_ & 0x00000040) == 0x00000040);
3808       }
3809       /**
3810        * <code>optional bool cache_blocks = 7 [default = true];</code>
3811        */
3812       public boolean getCacheBlocks() {
3813         return cacheBlocks_;
3814       }
3815       /**
3816        * <code>optional bool cache_blocks = 7 [default = true];</code>
3817        */
3818       public Builder setCacheBlocks(boolean value) {
3819         bitField0_ |= 0x00000040;
3820         cacheBlocks_ = value;
3821         onChanged();
3822         return this;
3823       }
3824       /**
3825        * <code>optional bool cache_blocks = 7 [default = true];</code>
3826        */
3827       public Builder clearCacheBlocks() {
3828         bitField0_ = (bitField0_ & ~0x00000040);
3829         cacheBlocks_ = true;
3830         onChanged();
3831         return this;
3832       }
3833 
3834       // optional uint32 store_limit = 8;
3835       private int storeLimit_ ;
3836       /**
3837        * <code>optional uint32 store_limit = 8;</code>
3838        */
3839       public boolean hasStoreLimit() {
3840         return ((bitField0_ & 0x00000080) == 0x00000080);
3841       }
3842       /**
3843        * <code>optional uint32 store_limit = 8;</code>
3844        */
3845       public int getStoreLimit() {
3846         return storeLimit_;
3847       }
3848       /**
3849        * <code>optional uint32 store_limit = 8;</code>
3850        */
3851       public Builder setStoreLimit(int value) {
3852         bitField0_ |= 0x00000080;
3853         storeLimit_ = value;
3854         onChanged();
3855         return this;
3856       }
3857       /**
3858        * <code>optional uint32 store_limit = 8;</code>
3859        */
3860       public Builder clearStoreLimit() {
3861         bitField0_ = (bitField0_ & ~0x00000080);
3862         storeLimit_ = 0;
3863         onChanged();
3864         return this;
3865       }
3866 
3867       // optional uint32 store_offset = 9;
3868       private int storeOffset_ ;
3869       /**
3870        * <code>optional uint32 store_offset = 9;</code>
3871        */
3872       public boolean hasStoreOffset() {
3873         return ((bitField0_ & 0x00000100) == 0x00000100);
3874       }
3875       /**
3876        * <code>optional uint32 store_offset = 9;</code>
3877        */
3878       public int getStoreOffset() {
3879         return storeOffset_;
3880       }
3881       /**
3882        * <code>optional uint32 store_offset = 9;</code>
3883        */
3884       public Builder setStoreOffset(int value) {
3885         bitField0_ |= 0x00000100;
3886         storeOffset_ = value;
3887         onChanged();
3888         return this;
3889       }
3890       /**
3891        * <code>optional uint32 store_offset = 9;</code>
3892        */
3893       public Builder clearStoreOffset() {
3894         bitField0_ = (bitField0_ & ~0x00000100);
3895         storeOffset_ = 0;
3896         onChanged();
3897         return this;
3898       }
3899 
3900       // optional bool existence_only = 10 [default = false];
3901       private boolean existenceOnly_ ;
3902       /**
3903        * <code>optional bool existence_only = 10 [default = false];</code>
3904        *
3905        * <pre>
3906        * The result isn't asked for, just check for
3907        * the existence.
3908        * </pre>
3909        */
3910       public boolean hasExistenceOnly() {
3911         return ((bitField0_ & 0x00000200) == 0x00000200);
3912       }
3913       /**
3914        * <code>optional bool existence_only = 10 [default = false];</code>
3915        *
3916        * <pre>
3917        * The result isn't asked for, just check for
3918        * the existence.
3919        * </pre>
3920        */
3921       public boolean getExistenceOnly() {
3922         return existenceOnly_;
3923       }
3924       /**
3925        * <code>optional bool existence_only = 10 [default = false];</code>
3926        *
3927        * <pre>
3928        * The result isn't asked for, just check for
3929        * the existence.
3930        * </pre>
3931        */
3932       public Builder setExistenceOnly(boolean value) {
3933         bitField0_ |= 0x00000200;
3934         existenceOnly_ = value;
3935         onChanged();
3936         return this;
3937       }
3938       /**
3939        * <code>optional bool existence_only = 10 [default = false];</code>
3940        *
3941        * <pre>
3942        * The result isn't asked for, just check for
3943        * the existence.
3944        * </pre>
3945        */
3946       public Builder clearExistenceOnly() {
3947         bitField0_ = (bitField0_ & ~0x00000200);
3948         existenceOnly_ = false;
3949         onChanged();
3950         return this;
3951       }
3952 
3953       // optional .hbase.pb.Consistency consistency = 12 [default = STRONG];
3954       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
3955       /**
3956        * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
3957        */
3958       public boolean hasConsistency() {
3959         return ((bitField0_ & 0x00000400) == 0x00000400);
3960       }
3961       /**
3962        * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
3963        */
3964       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
3965         return consistency_;
3966       }
3967       /**
3968        * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
3969        */
3970       public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
3971         if (value == null) {
3972           throw new NullPointerException();
3973         }
3974         bitField0_ |= 0x00000400;
3975         consistency_ = value;
3976         onChanged();
3977         return this;
3978       }
3979       /**
3980        * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
3981        */
3982       public Builder clearConsistency() {
3983         bitField0_ = (bitField0_ & ~0x00000400);
3984         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
3985         onChanged();
3986         return this;
3987       }
3988 
3989       // @@protoc_insertion_point(builder_scope:hbase.pb.Get)
3990     }
3991 
3992     static {
3993       defaultInstance = new Get(true);
3994       defaultInstance.initFields();
3995     }
3996 
3997     // @@protoc_insertion_point(class_scope:hbase.pb.Get)
3998   }
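For orientation, here is a minimal construction sketch using only the Builder setters shown in the Get class above; the row key "r1" is a made-up value and the sketch is illustrative, not part of the generated file:

// Illustrative sketch only -- not part of the generated source.
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class GetBuilderSketch {
  public static void main(String[] args) {
    ClientProtos.Get get = ClientProtos.Get.newBuilder()
        .setRow(ByteString.copyFromUtf8("r1"))   // required bytes row = 1
        .setMaxVersions(3)                       // optional uint32, default 1
        .setCacheBlocks(false)                   // optional bool, default true
        .setExistenceOnly(true)                  // only existence is asked for
        .build();                                // validates the required row field

    System.out.println(get.getMaxVersions());    // 3
    System.out.println(get.getExistenceOnly());  // true
  }
}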
3999 
4000   public interface ResultOrBuilder
4001       extends com.google.protobuf.MessageOrBuilder {
4002 
4003     // repeated .hbase.pb.Cell cell = 1;
4004     /**
4005      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4006      *
4007      * <pre>
4008      * Result includes the Cells or else it just has a count of Cells
4009      * that are carried otherwise.
4010      * </pre>
4011      */
4012     java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> 
4013         getCellList();
4014     /**
4015      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4016      *
4017      * <pre>
4018      * Result includes the Cells or else it just has a count of Cells
4019      * that are carried otherwise.
4020      * </pre>
4021      */
4022     org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index);
4023     /**
4024      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4025      *
4026      * <pre>
4027      * Result includes the Cells or else it just has a count of Cells
4028      * that are carried otherwise.
4029      * </pre>
4030      */
4031     int getCellCount();
4032     /**
4033      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4034      *
4035      * <pre>
4036      * Result includes the Cells or else it just has a count of Cells
4037      * that are carried otherwise.
4038      * </pre>
4039      */
4040     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
4041         getCellOrBuilderList();
4042     /**
4043      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4044      *
4045      * <pre>
4046      * Result includes the Cells or else it just has a count of Cells
4047      * that are carried otherwise.
4048      * </pre>
4049      */
4050     org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
4051         int index);
4052 
4053     // optional int32 associated_cell_count = 2;
4054     /**
4055      * <code>optional int32 associated_cell_count = 2;</code>
4056      *
4057      * <pre>
4058      * The below count is set when the associated cells are
4059      * not part of this protobuf message; they are passed alongside
4060      * and then this Message is just a placeholder with metadata.
4061      * The count is needed to know how many to peel off the block of Cells as
4062      * ours.  NOTE: This is different from the pb managed cell_count of the
4063      * 'cell' field above which is non-null when the cells are pb'd.
4064      * </pre>
4065      */
4066     boolean hasAssociatedCellCount();
4067     /**
4068      * <code>optional int32 associated_cell_count = 2;</code>
4069      *
4070      * <pre>
4071      * The below count is set when the associated cells are
4072      * not part of this protobuf message; they are passed alongside
4073      * and then this Message is just a placeholder with metadata.
4074      * The count is needed to know how many to peel off the block of Cells as
4075      * ours.  NOTE: This is different from the pb managed cell_count of the
4076      * 'cell' field above which is non-null when the cells are pb'd.
4077      * </pre>
4078      */
4079     int getAssociatedCellCount();
4080 
4081     // optional bool exists = 3;
4082     /**
4083      * <code>optional bool exists = 3;</code>
4084      *
4085      * <pre>
4086      * used for Get to check existence only. Not set if existence_only was not set to true
4087      *  in the query.
4088      * </pre>
4089      */
4090     boolean hasExists();
4091     /**
4092      * <code>optional bool exists = 3;</code>
4093      *
4094      * <pre>
4095      * used for Get to check existence only. Not set if existence_only was not set to true
4096      *  in the query.
4097      * </pre>
4098      */
4099     boolean getExists();
4100 
4101     // optional bool stale = 4 [default = false];
4102     /**
4103      * <code>optional bool stale = 4 [default = false];</code>
4104      *
4105      * <pre>
4106      * Whether or not the results are coming from possibly stale data 
4107      * </pre>
4108      */
4109     boolean hasStale();
4110     /**
4111      * <code>optional bool stale = 4 [default = false];</code>
4112      *
4113      * <pre>
4114      * Whether or not the results are coming from possibly stale data 
4115      * </pre>
4116      */
4117     boolean getStale();
4118 
4119     // optional bool partial = 5 [default = false];
4120     /**
4121      * <code>optional bool partial = 5 [default = false];</code>
4122      *
4123      * <pre>
4124      * Whether or not the entire result could be returned. Results will be split when
4125      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4126      * cells for a row and must be combined with a result containing the remaining cells
4127      * to form a complete result
4128      * </pre>
4129      */
4130     boolean hasPartial();
4131     /**
4132      * <code>optional bool partial = 5 [default = false];</code>
4133      *
4134      * <pre>
4135      * Whether or not the entire result could be returned. Results will be split when
4136      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4137      * cells for a row and must be combined with a result containing the remaining cells
4138      * to form a complete result
4139      * </pre>
4140      */
4141     boolean getPartial();
4142   }
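The accessors declared above are the read-side counterpart to the Get options earlier in this file. A hedged sketch of inspecting a response, assuming a ClientProtos.Result instance obtained elsewhere (illustrative only, not part of the generated file):

// Illustrative sketch only -- not part of the generated source.
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class ResultInspectionSketch {
  static void describe(ClientProtos.Result result) {
    // Either the Cells travel inline in the protobuf...
    for (CellProtos.Cell cell : result.getCellList()) {
      System.out.println("inline cell: " + cell);
    }
    // ...or only a count is carried and the Cells are passed alongside the message.
    if (result.hasAssociatedCellCount()) {
      System.out.println("cells carried out of band: " + result.getAssociatedCellCount());
    }
    // exists is only populated when the query set existence_only = true.
    if (result.hasExists()) {
      System.out.println("row exists: " + result.getExists());
    }
    System.out.println("possibly stale: " + result.getStale()
        + ", partial: " + result.getPartial());
  }
}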
4143   /**
4144    * Protobuf type {@code hbase.pb.Result}
4145    */
4146   public static final class Result extends
4147       com.google.protobuf.GeneratedMessage
4148       implements ResultOrBuilder {
4149     // Use Result.newBuilder() to construct.
4150     private Result(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4151       super(builder);
4152       this.unknownFields = builder.getUnknownFields();
4153     }
4154     private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4155 
4156     private static final Result defaultInstance;
4157     public static Result getDefaultInstance() {
4158       return defaultInstance;
4159     }
4160 
4161     public Result getDefaultInstanceForType() {
4162       return defaultInstance;
4163     }
4164 
4165     private final com.google.protobuf.UnknownFieldSet unknownFields;
4166     @java.lang.Override
4167     public final com.google.protobuf.UnknownFieldSet
4168         getUnknownFields() {
4169       return this.unknownFields;
4170     }
4171     private Result(
4172         com.google.protobuf.CodedInputStream input,
4173         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4174         throws com.google.protobuf.InvalidProtocolBufferException {
4175       initFields();
4176       int mutable_bitField0_ = 0;
4177       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4178           com.google.protobuf.UnknownFieldSet.newBuilder();
4179       try {
4180         boolean done = false;
4181         while (!done) {
4182           int tag = input.readTag();
4183           switch (tag) {
4184             case 0:
4185               done = true;
4186               break;
4187             default: {
4188               if (!parseUnknownField(input, unknownFields,
4189                                      extensionRegistry, tag)) {
4190                 done = true;
4191               }
4192               break;
4193             }
4194             case 10: {
4195               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4196                 cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>();
4197                 mutable_bitField0_ |= 0x00000001;
4198               }
4199               cell_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry));
4200               break;
4201             }
4202             case 16: {
4203               bitField0_ |= 0x00000001;
4204               associatedCellCount_ = input.readInt32();
4205               break;
4206             }
4207             case 24: {
4208               bitField0_ |= 0x00000002;
4209               exists_ = input.readBool();
4210               break;
4211             }
4212             case 32: {
4213               bitField0_ |= 0x00000004;
4214               stale_ = input.readBool();
4215               break;
4216             }
4217             case 40: {
4218               bitField0_ |= 0x00000008;
4219               partial_ = input.readBool();
4220               break;
4221             }
4222           }
4223         }
4224       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4225         throw e.setUnfinishedMessage(this);
4226       } catch (java.io.IOException e) {
4227         throw new com.google.protobuf.InvalidProtocolBufferException(
4228             e.getMessage()).setUnfinishedMessage(this);
4229       } finally {
4230         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4231           cell_ = java.util.Collections.unmodifiableList(cell_);
4232         }
4233         this.unknownFields = unknownFields.build();
4234         makeExtensionsImmutable();
4235       }
4236     }
4237     public static final com.google.protobuf.Descriptors.Descriptor
4238         getDescriptor() {
4239       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor;
4240     }
4241 
4242     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4243         internalGetFieldAccessorTable() {
4244       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable
4245           .ensureFieldAccessorsInitialized(
4246               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
4247     }
4248 
4249     public static com.google.protobuf.Parser<Result> PARSER =
4250         new com.google.protobuf.AbstractParser<Result>() {
4251       public Result parsePartialFrom(
4252           com.google.protobuf.CodedInputStream input,
4253           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4254           throws com.google.protobuf.InvalidProtocolBufferException {
4255         return new Result(input, extensionRegistry);
4256       }
4257     };
4258 
4259     @java.lang.Override
4260     public com.google.protobuf.Parser<Result> getParserForType() {
4261       return PARSER;
4262     }
4263 
4264     private int bitField0_;
4265     // repeated .hbase.pb.Cell cell = 1;
4266     public static final int CELL_FIELD_NUMBER = 1;
4267     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_;
4268     /**
4269      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4270      *
4271      * <pre>
4272      * Result includes the Cells or else it just has a count of Cells
4273      * that are carried otherwise.
4274      * </pre>
4275      */
4276     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
4277       return cell_;
4278     }
4279     /**
4280      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4281      *
4282      * <pre>
4283      * Result includes the Cells or else it just has a count of Cells
4284      * that are carried otherwise.
4285      * </pre>
4286      */
4287     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
4288         getCellOrBuilderList() {
4289       return cell_;
4290     }
4291     /**
4292      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4293      *
4294      * <pre>
4295      * Result includes the Cells or else it just has a count of Cells
4296      * that are carried otherwise.
4297      * </pre>
4298      */
4299     public int getCellCount() {
4300       return cell_.size();
4301     }
4302     /**
4303      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4304      *
4305      * <pre>
4306      * Result includes the Cells or else it just has a count of Cells
4307      * that are carried otherwise.
4308      * </pre>
4309      */
4310     public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
4311       return cell_.get(index);
4312     }
4313     /**
4314      * <code>repeated .hbase.pb.Cell cell = 1;</code>
4315      *
4316      * <pre>
4317      * Result includes the Cells or else it just has a count of Cells
4318      * that are carried otherwise.
4319      * </pre>
4320      */
4321     public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
4322         int index) {
4323       return cell_.get(index);
4324     }
4325 
4326     // optional int32 associated_cell_count = 2;
4327     public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2;
4328     private int associatedCellCount_;
4329     /**
4330      * <code>optional int32 associated_cell_count = 2;</code>
4331      *
4332      * <pre>
4333      * The below count is set when the associated cells are
4334      * not part of this protobuf message; they are passed alongside
4335      * and then this Message is just a placeholder with metadata.
4336      * The count is needed to know how many to peel off the block of Cells as
4337      * ours.  NOTE: This is different from the pb managed cell_count of the
4338      * 'cell' field above which is non-null when the cells are pb'd.
4339      * </pre>
4340      */
4341     public boolean hasAssociatedCellCount() {
4342       return ((bitField0_ & 0x00000001) == 0x00000001);
4343     }
4344     /**
4345      * <code>optional int32 associated_cell_count = 2;</code>
4346      *
4347      * <pre>
4348      * The below count is set when the associated cells are
4349      * not part of this protobuf message; they are passed alongside
4350      * and then this Message is just a placeholder with metadata.
4351      * The count is needed to know how many to peel off the block of Cells as
4352      * ours.  NOTE: This is different from the pb managed cell_count of the
4353      * 'cell' field above which is non-null when the cells are pb'd.
4354      * </pre>
4355      */
4356     public int getAssociatedCellCount() {
4357       return associatedCellCount_;
4358     }
4359 
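         /*
          * Editorial sketch, not generated code: the two ways a Result carries its
          * cells, per the field comments above. When the cells are pb'd they show up
          * in getCellList(); when they travel alongside the RPC only this count is
          * set. The side-channel read itself is outside this class and is only
          * hinted at in a comment.
          *
          *   if (result.getCellCount() > 0) {
          *     // cells are embedded in the message itself
          *     java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cells =
          *         result.getCellList();
          *   } else if (result.hasAssociatedCellCount()) {
          *     int n = result.getAssociatedCellCount();
          *     // read n cells from the block passed alongside this message
          *   }
          */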
4360     // optional bool exists = 3;
4361     public static final int EXISTS_FIELD_NUMBER = 3;
4362     private boolean exists_;
4363     /**
4364      * <code>optional bool exists = 3;</code>
4365      *
4366      * <pre>
4367      * Used for Get to check existence only. Not set unless existence_only was set to true
4368      * in the query.
4369      * </pre>
4370      */
4371     public boolean hasExists() {
4372       return ((bitField0_ & 0x00000002) == 0x00000002);
4373     }
4374     /**
4375      * <code>optional bool exists = 3;</code>
4376      *
4377      * <pre>
4378      * Used for Get to check existence only. Not set unless existence_only was set to true
4379      * in the query.
4380      * </pre>
4381      */
4382     public boolean getExists() {
4383       return exists_;
4384     }
4385 
4386     // optional bool stale = 4 [default = false];
4387     public static final int STALE_FIELD_NUMBER = 4;
4388     private boolean stale_;
4389     /**
4390      * <code>optional bool stale = 4 [default = false];</code>
4391      *
4392      * <pre>
4393      * Whether or not the results are coming from possibly stale data 
4394      * </pre>
4395      */
4396     public boolean hasStale() {
4397       return ((bitField0_ & 0x00000004) == 0x00000004);
4398     }
4399     /**
4400      * <code>optional bool stale = 4 [default = false];</code>
4401      *
4402      * <pre>
4403      * Whether or not the results are coming from possibly stale data 
4404      * </pre>
4405      */
4406     public boolean getStale() {
4407       return stale_;
4408     }
4409 
4410     // optional bool partial = 5 [default = false];
4411     public static final int PARTIAL_FIELD_NUMBER = 5;
4412     private boolean partial_;
4413     /**
4414      * <code>optional bool partial = 5 [default = false];</code>
4415      *
4416      * <pre>
4417      * Whether or not the entire result could be returned. Results will be split when
4418      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4419      * cells for a row and must be combined with a result containing the remaining cells
4420      * to form a complete result
4421      * </pre>
4422      */
4423     public boolean hasPartial() {
4424       return ((bitField0_ & 0x00000008) == 0x00000008);
4425     }
4426     /**
4427      * <code>optional bool partial = 5 [default = false];</code>
4428      *
4429      * <pre>
4430      * Whether or not the entire result could be returned. Results will be split when
4431      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4432      * cells for a row and must be combined with a result containing the remaining cells
4433      * to form a complete result
4434      * </pre>
4435      */
4436     public boolean getPartial() {
4437       return partial_;
4438     }
4439 
4440     private void initFields() {
4441       cell_ = java.util.Collections.emptyList();
4442       associatedCellCount_ = 0;
4443       exists_ = false;
4444       stale_ = false;
4445       partial_ = false;
4446     }
4447     private byte memoizedIsInitialized = -1;
4448     public final boolean isInitialized() {
4449       byte isInitialized = memoizedIsInitialized;
4450       if (isInitialized != -1) return isInitialized == 1;
4451 
4452       memoizedIsInitialized = 1;
4453       return true;
4454     }
4455 
4456     public void writeTo(com.google.protobuf.CodedOutputStream output)
4457                         throws java.io.IOException {
4458       getSerializedSize();
4459       for (int i = 0; i < cell_.size(); i++) {
4460         output.writeMessage(1, cell_.get(i));
4461       }
4462       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4463         output.writeInt32(2, associatedCellCount_);
4464       }
4465       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4466         output.writeBool(3, exists_);
4467       }
4468       if (((bitField0_ & 0x00000004) == 0x00000004)) {
4469         output.writeBool(4, stale_);
4470       }
4471       if (((bitField0_ & 0x00000008) == 0x00000008)) {
4472         output.writeBool(5, partial_);
4473       }
4474       getUnknownFields().writeTo(output);
4475     }
4476 
4477     private int memoizedSerializedSize = -1;
4478     public int getSerializedSize() {
4479       int size = memoizedSerializedSize;
4480       if (size != -1) return size;
4481 
4482       size = 0;
4483       for (int i = 0; i < cell_.size(); i++) {
4484         size += com.google.protobuf.CodedOutputStream
4485           .computeMessageSize(1, cell_.get(i));
4486       }
4487       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4488         size += com.google.protobuf.CodedOutputStream
4489           .computeInt32Size(2, associatedCellCount_);
4490       }
4491       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4492         size += com.google.protobuf.CodedOutputStream
4493           .computeBoolSize(3, exists_);
4494       }
4495       if (((bitField0_ & 0x00000004) == 0x00000004)) {
4496         size += com.google.protobuf.CodedOutputStream
4497           .computeBoolSize(4, stale_);
4498       }
4499       if (((bitField0_ & 0x00000008) == 0x00000008)) {
4500         size += com.google.protobuf.CodedOutputStream
4501           .computeBoolSize(5, partial_);
4502       }
4503       size += getUnknownFields().getSerializedSize();
4504       memoizedSerializedSize = size;
4505       return size;
4506     }
4507 
4508     private static final long serialVersionUID = 0L;
4509     @java.lang.Override
4510     protected java.lang.Object writeReplace()
4511         throws java.io.ObjectStreamException {
4512       return super.writeReplace();
4513     }
4514 
4515     @java.lang.Override
4516     public boolean equals(final java.lang.Object obj) {
4517       if (obj == this) {
4518        return true;
4519       }
4520       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) {
4521         return super.equals(obj);
4522       }
4523       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj;
4524 
4525       boolean result = true;
4526       result = result && getCellList()
4527           .equals(other.getCellList());
4528       result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
4529       if (hasAssociatedCellCount()) {
4530         result = result && (getAssociatedCellCount()
4531             == other.getAssociatedCellCount());
4532       }
4533       result = result && (hasExists() == other.hasExists());
4534       if (hasExists()) {
4535         result = result && (getExists()
4536             == other.getExists());
4537       }
4538       result = result && (hasStale() == other.hasStale());
4539       if (hasStale()) {
4540         result = result && (getStale()
4541             == other.getStale());
4542       }
4543       result = result && (hasPartial() == other.hasPartial());
4544       if (hasPartial()) {
4545         result = result && (getPartial()
4546             == other.getPartial());
4547       }
4548       result = result &&
4549           getUnknownFields().equals(other.getUnknownFields());
4550       return result;
4551     }
4552 
4553     private int memoizedHashCode = 0;
4554     @java.lang.Override
4555     public int hashCode() {
4556       if (memoizedHashCode != 0) {
4557         return memoizedHashCode;
4558       }
4559       int hash = 41;
4560       hash = (19 * hash) + getDescriptorForType().hashCode();
4561       if (getCellCount() > 0) {
4562         hash = (37 * hash) + CELL_FIELD_NUMBER;
4563         hash = (53 * hash) + getCellList().hashCode();
4564       }
4565       if (hasAssociatedCellCount()) {
4566         hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
4567         hash = (53 * hash) + getAssociatedCellCount();
4568       }
4569       if (hasExists()) {
4570         hash = (37 * hash) + EXISTS_FIELD_NUMBER;
4571         hash = (53 * hash) + hashBoolean(getExists());
4572       }
4573       if (hasStale()) {
4574         hash = (37 * hash) + STALE_FIELD_NUMBER;
4575         hash = (53 * hash) + hashBoolean(getStale());
4576       }
4577       if (hasPartial()) {
4578         hash = (37 * hash) + PARTIAL_FIELD_NUMBER;
4579         hash = (53 * hash) + hashBoolean(getPartial());
4580       }
4581       hash = (29 * hash) + getUnknownFields().hashCode();
4582       memoizedHashCode = hash;
4583       return hash;
4584     }
4585 
4586     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4587         com.google.protobuf.ByteString data)
4588         throws com.google.protobuf.InvalidProtocolBufferException {
4589       return PARSER.parseFrom(data);
4590     }
4591     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4592         com.google.protobuf.ByteString data,
4593         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4594         throws com.google.protobuf.InvalidProtocolBufferException {
4595       return PARSER.parseFrom(data, extensionRegistry);
4596     }
4597     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data)
4598         throws com.google.protobuf.InvalidProtocolBufferException {
4599       return PARSER.parseFrom(data);
4600     }
4601     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4602         byte[] data,
4603         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4604         throws com.google.protobuf.InvalidProtocolBufferException {
4605       return PARSER.parseFrom(data, extensionRegistry);
4606     }
4607     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input)
4608         throws java.io.IOException {
4609       return PARSER.parseFrom(input);
4610     }
4611     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4612         java.io.InputStream input,
4613         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4614         throws java.io.IOException {
4615       return PARSER.parseFrom(input, extensionRegistry);
4616     }
4617     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input)
4618         throws java.io.IOException {
4619       return PARSER.parseDelimitedFrom(input);
4620     }
4621     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(
4622         java.io.InputStream input,
4623         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4624         throws java.io.IOException {
4625       return PARSER.parseDelimitedFrom(input, extensionRegistry);
4626     }
4627     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4628         com.google.protobuf.CodedInputStream input)
4629         throws java.io.IOException {
4630       return PARSER.parseFrom(input);
4631     }
4632     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4633         com.google.protobuf.CodedInputStream input,
4634         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4635         throws java.io.IOException {
4636       return PARSER.parseFrom(input, extensionRegistry);
4637     }
4638 
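         /*
          * Editorial sketch, not generated code: decoding a Result with the static
          * parseFrom overloads above. The byte[] source is an assumption; parseFrom
          * throws InvalidProtocolBufferException on malformed input.
          *
          *   byte[] wire = ...;  // a serialized hbase.pb.Result obtained elsewhere
          *   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result r =
          *       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.parseFrom(wire);
          *   boolean stale = r.hasStale() && r.getStale();
          *   int embeddedCells = r.getCellCount();
          */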
4639     public static Builder newBuilder() { return Builder.create(); }
4640     public Builder newBuilderForType() { return newBuilder(); }
4641     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) {
4642       return newBuilder().mergeFrom(prototype);
4643     }
4644     public Builder toBuilder() { return newBuilder(this); }
4645 
4646     @java.lang.Override
4647     protected Builder newBuilderForType(
4648         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4649       Builder builder = new Builder(parent);
4650       return builder;
4651     }
4652     /**
4653      * Protobuf type {@code hbase.pb.Result}
4654      */
4655     public static final class Builder extends
4656         com.google.protobuf.GeneratedMessage.Builder<Builder>
4657        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder {
4658       public static final com.google.protobuf.Descriptors.Descriptor
4659           getDescriptor() {
4660         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor;
4661       }
4662 
4663       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4664           internalGetFieldAccessorTable() {
4665         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable
4666             .ensureFieldAccessorsInitialized(
4667                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
4668       }
4669 
4670       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder()
4671       private Builder() {
4672         maybeForceBuilderInitialization();
4673       }
4674 
4675       private Builder(
4676           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4677         super(parent);
4678         maybeForceBuilderInitialization();
4679       }
4680       private void maybeForceBuilderInitialization() {
4681         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4682           getCellFieldBuilder();
4683         }
4684       }
4685       private static Builder create() {
4686         return new Builder();
4687       }
4688 
4689       public Builder clear() {
4690         super.clear();
4691         if (cellBuilder_ == null) {
4692           cell_ = java.util.Collections.emptyList();
4693           bitField0_ = (bitField0_ & ~0x00000001);
4694         } else {
4695           cellBuilder_.clear();
4696         }
4697         associatedCellCount_ = 0;
4698         bitField0_ = (bitField0_ & ~0x00000002);
4699         exists_ = false;
4700         bitField0_ = (bitField0_ & ~0x00000004);
4701         stale_ = false;
4702         bitField0_ = (bitField0_ & ~0x00000008);
4703         partial_ = false;
4704         bitField0_ = (bitField0_ & ~0x00000010);
4705         return this;
4706       }
4707 
4708       public Builder clone() {
4709         return create().mergeFrom(buildPartial());
4710       }
4711 
4712       public com.google.protobuf.Descriptors.Descriptor
4713           getDescriptorForType() {
4714         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor;
4715       }
4716 
4717       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() {
4718         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
4719       }
4720 
4721       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() {
4722         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial();
4723         if (!result.isInitialized()) {
4724           throw newUninitializedMessageException(result);
4725         }
4726         return result;
4727       }
4728 
4729       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() {
4730         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this);
4731         int from_bitField0_ = bitField0_;
4732         int to_bitField0_ = 0;
4733         if (cellBuilder_ == null) {
4734           if (((bitField0_ & 0x00000001) == 0x00000001)) {
4735             cell_ = java.util.Collections.unmodifiableList(cell_);
4736             bitField0_ = (bitField0_ & ~0x00000001);
4737           }
4738           result.cell_ = cell_;
4739         } else {
4740           result.cell_ = cellBuilder_.build();
4741         }
4742         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4743           to_bitField0_ |= 0x00000001;
4744         }
4745         result.associatedCellCount_ = associatedCellCount_;
4746         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4747           to_bitField0_ |= 0x00000002;
4748         }
4749         result.exists_ = exists_;
4750         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
4751           to_bitField0_ |= 0x00000004;
4752         }
4753         result.stale_ = stale_;
4754         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
4755           to_bitField0_ |= 0x00000008;
4756         }
4757         result.partial_ = partial_;
4758         result.bitField0_ = to_bitField0_;
4759         onBuilt();
4760         return result;
4761       }
4762 
4763       public Builder mergeFrom(com.google.protobuf.Message other) {
4764         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) {
4765           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other);
4766         } else {
4767           super.mergeFrom(other);
4768           return this;
4769         }
4770       }
4771 
4772       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) {
4773         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this;
4774         if (cellBuilder_ == null) {
4775           if (!other.cell_.isEmpty()) {
4776             if (cell_.isEmpty()) {
4777               cell_ = other.cell_;
4778               bitField0_ = (bitField0_ & ~0x00000001);
4779             } else {
4780               ensureCellIsMutable();
4781               cell_.addAll(other.cell_);
4782             }
4783             onChanged();
4784           }
4785         } else {
4786           if (!other.cell_.isEmpty()) {
4787             if (cellBuilder_.isEmpty()) {
4788               cellBuilder_.dispose();
4789               cellBuilder_ = null;
4790               cell_ = other.cell_;
4791               bitField0_ = (bitField0_ & ~0x00000001);
4792               cellBuilder_ = 
4793                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
4794                    getCellFieldBuilder() : null;
4795             } else {
4796               cellBuilder_.addAllMessages(other.cell_);
4797             }
4798           }
4799         }
4800         if (other.hasAssociatedCellCount()) {
4801           setAssociatedCellCount(other.getAssociatedCellCount());
4802         }
4803         if (other.hasExists()) {
4804           setExists(other.getExists());
4805         }
4806         if (other.hasStale()) {
4807           setStale(other.getStale());
4808         }
4809         if (other.hasPartial()) {
4810           setPartial(other.getPartial());
4811         }
4812         this.mergeUnknownFields(other.getUnknownFields());
4813         return this;
4814       }
4815 
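           /*
            * Editorial sketch, not generated code: merging two Result messages with
            * the method above. Cells are concatenated and scalar fields of the
            * argument win when they are set, the usual protobuf merge behaviour.
            * "base" and "update" are assumed Result instances.
            *
            *   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result merged =
            *       base.toBuilder().mergeFrom(update).build();
            */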
4816       public final boolean isInitialized() {
4817         return true;
4818       }
4819 
4820       public Builder mergeFrom(
4821           com.google.protobuf.CodedInputStream input,
4822           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4823           throws java.io.IOException {
4824         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parsedMessage = null;
4825         try {
4826           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4827         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4828           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage();
4829           throw e;
4830         } finally {
4831           if (parsedMessage != null) {
4832             mergeFrom(parsedMessage);
4833           }
4834         }
4835         return this;
4836       }
4837       private int bitField0_;
4838 
4839       // repeated .hbase.pb.Cell cell = 1;
4840       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_ =
4841         java.util.Collections.emptyList();
4842       private void ensureCellIsMutable() {
4843         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
4844           cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>(cell_);
4845           bitField0_ |= 0x00000001;
4846          }
4847       }
4848 
4849       private com.google.protobuf.RepeatedFieldBuilder<
4850           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_;
4851 
4852       /**
4853        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4854        *
4855        * <pre>
4856        * Result includes the Cells or else it just has a count of Cells
4857        * that are carried otherwise.
4858        * </pre>
4859        */
4860       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
4861         if (cellBuilder_ == null) {
4862           return java.util.Collections.unmodifiableList(cell_);
4863         } else {
4864           return cellBuilder_.getMessageList();
4865         }
4866       }
4867       /**
4868        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4869        *
4870        * <pre>
4871        * Result includes the Cells or else it just has a count of Cells
4872        * that are carried otherwise.
4873        * </pre>
4874        */
4875       public int getCellCount() {
4876         if (cellBuilder_ == null) {
4877           return cell_.size();
4878         } else {
4879           return cellBuilder_.getCount();
4880         }
4881       }
4882       /**
4883        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4884        *
4885        * <pre>
4886        * Result includes the Cells or else it just has a count of Cells
4887        * that are carried otherwise.
4888        * </pre>
4889        */
4890       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
4891         if (cellBuilder_ == null) {
4892           return cell_.get(index);
4893         } else {
4894           return cellBuilder_.getMessage(index);
4895         }
4896       }
4897       /**
4898        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4899        *
4900        * <pre>
4901        * Result includes the Cells or else it just has a count of Cells
4902        * that are carried otherwise.
4903        * </pre>
4904        */
4905       public Builder setCell(
4906           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
4907         if (cellBuilder_ == null) {
4908           if (value == null) {
4909             throw new NullPointerException();
4910           }
4911           ensureCellIsMutable();
4912           cell_.set(index, value);
4913           onChanged();
4914         } else {
4915           cellBuilder_.setMessage(index, value);
4916         }
4917         return this;
4918       }
4919       /**
4920        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4921        *
4922        * <pre>
4923        * Result includes the Cells or else it just has a count of Cells
4924        * that are carried otherwise.
4925        * </pre>
4926        */
4927       public Builder setCell(
4928           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
4929         if (cellBuilder_ == null) {
4930           ensureCellIsMutable();
4931           cell_.set(index, builderForValue.build());
4932           onChanged();
4933         } else {
4934           cellBuilder_.setMessage(index, builderForValue.build());
4935         }
4936         return this;
4937       }
4938       /**
4939        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4940        *
4941        * <pre>
4942        * Result includes the Cells or else it just has a count of Cells
4943        * that are carried otherwise.
4944        * </pre>
4945        */
4946       public Builder addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
4947         if (cellBuilder_ == null) {
4948           if (value == null) {
4949             throw new NullPointerException();
4950           }
4951           ensureCellIsMutable();
4952           cell_.add(value);
4953           onChanged();
4954         } else {
4955           cellBuilder_.addMessage(value);
4956         }
4957         return this;
4958       }
4959       /**
4960        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4961        *
4962        * <pre>
4963        * Result includes the Cells or else it just has a count of Cells
4964        * that are carried otherwise.
4965        * </pre>
4966        */
4967       public Builder addCell(
4968           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
4969         if (cellBuilder_ == null) {
4970           if (value == null) {
4971             throw new NullPointerException();
4972           }
4973           ensureCellIsMutable();
4974           cell_.add(index, value);
4975           onChanged();
4976         } else {
4977           cellBuilder_.addMessage(index, value);
4978         }
4979         return this;
4980       }
4981       /**
4982        * <code>repeated .hbase.pb.Cell cell = 1;</code>
4983        *
4984        * <pre>
4985        * Result includes the Cells or else it just has a count of Cells
4986        * that are carried otherwise.
4987        * </pre>
4988        */
4989       public Builder addCell(
4990           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
4991         if (cellBuilder_ == null) {
4992           ensureCellIsMutable();
4993           cell_.add(builderForValue.build());
4994           onChanged();
4995         } else {
4996           cellBuilder_.addMessage(builderForValue.build());
4997         }
4998         return this;
4999       }
5000       /**
5001        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5002        *
5003        * <pre>
5004        * Result includes the Cells or else it just has a count of Cells
5005        * that are carried otherwise.
5006        * </pre>
5007        */
5008       public Builder addCell(
5009           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5010         if (cellBuilder_ == null) {
5011           ensureCellIsMutable();
5012           cell_.add(index, builderForValue.build());
5013           onChanged();
5014         } else {
5015           cellBuilder_.addMessage(index, builderForValue.build());
5016         }
5017         return this;
5018       }
5019       /**
5020        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5021        *
5022        * <pre>
5023        * Result includes the Cells or else it just has a count of Cells
5024        * that are carried otherwise.
5025        * </pre>
5026        */
5027       public Builder addAllCell(
5028           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> values) {
5029         if (cellBuilder_ == null) {
5030           ensureCellIsMutable();
5031           super.addAll(values, cell_);
5032           onChanged();
5033         } else {
5034           cellBuilder_.addAllMessages(values);
5035         }
5036         return this;
5037       }
5038       /**
5039        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5040        *
5041        * <pre>
5042        * Result includes the Cells or else it just has a count of Cells
5043        * that are carried otherwise.
5044        * </pre>
5045        */
5046       public Builder clearCell() {
5047         if (cellBuilder_ == null) {
5048           cell_ = java.util.Collections.emptyList();
5049           bitField0_ = (bitField0_ & ~0x00000001);
5050           onChanged();
5051         } else {
5052           cellBuilder_.clear();
5053         }
5054         return this;
5055       }
5056       /**
5057        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5058        *
5059        * <pre>
5060        * Result includes the Cells or else it just has a count of Cells
5061        * that are carried otherwise.
5062        * </pre>
5063        */
5064       public Builder removeCell(int index) {
5065         if (cellBuilder_ == null) {
5066           ensureCellIsMutable();
5067           cell_.remove(index);
5068           onChanged();
5069         } else {
5070           cellBuilder_.remove(index);
5071         }
5072         return this;
5073       }
5074       /**
5075        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5076        *
5077        * <pre>
5078        * Result includes the Cells or else it just has a count of Cells
5079        * that are carried otherwise.
5080        * </pre>
5081        */
5082       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder getCellBuilder(
5083           int index) {
5084         return getCellFieldBuilder().getBuilder(index);
5085       }
5086       /**
5087        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5088        *
5089        * <pre>
5090        * Result includes the Cells or else it just has a count of Cells
5091        * that are carried otherwise.
5092        * </pre>
5093        */
5094       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
5095           int index) {
5096         if (cellBuilder_ == null) {
5097           return cell_.get(index);  } else {
5098           return cellBuilder_.getMessageOrBuilder(index);
5099         }
5100       }
5101       /**
5102        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5103        *
5104        * <pre>
5105        * Result includes the Cells or else it just has a count of Cells
5106        * that are carried otherwise.
5107        * </pre>
5108        */
5109       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
5110            getCellOrBuilderList() {
5111         if (cellBuilder_ != null) {
5112           return cellBuilder_.getMessageOrBuilderList();
5113         } else {
5114           return java.util.Collections.unmodifiableList(cell_);
5115         }
5116       }
5117       /**
5118        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5119        *
5120        * <pre>
5121        * Result includes the Cells or else it just has a count of Cells
5122        * that are carried otherwise.
5123        * </pre>
5124        */
5125       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() {
5126         return getCellFieldBuilder().addBuilder(
5127             org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
5128       }
5129       /**
5130        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5131        *
5132        * <pre>
5133        * Result includes the Cells or else it just has a count of Cells
5134        * that are carried otherwise.
5135        * </pre>
5136        */
5137       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder(
5138           int index) {
5139         return getCellFieldBuilder().addBuilder(
5140             index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
5141       }
5142       /**
5143        * <code>repeated .hbase.pb.Cell cell = 1;</code>
5144        *
5145        * <pre>
5146        * Result includes the Cells or else it just has a count of Cells
5147        * that are carried otherwise.
5148        * </pre>
5149        */
5150       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder> 
5151            getCellBuilderList() {
5152         return getCellFieldBuilder().getBuilderList();
5153       }
5154       private com.google.protobuf.RepeatedFieldBuilder<
5155           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
5156           getCellFieldBuilder() {
5157         if (cellBuilder_ == null) {
5158           cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5159               org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>(
5160                   cell_,
5161                   ((bitField0_ & 0x00000001) == 0x00000001),
5162                   getParentForChildren(),
5163                   isClean());
5164           cell_ = null;
5165         }
5166         return cellBuilder_;
5167       }
5168 
5169       // optional int32 associated_cell_count = 2;
5170       private int associatedCellCount_ ;
5171       /**
5172        * <code>optional int32 associated_cell_count = 2;</code>
5173        *
5174        * <pre>
5175        * The below count is set when the associated cells are
5176        * not part of this protobuf message; they are passed alongside
5177        * and then this Message is just a placeholder with metadata.
5178        * The count is needed to know how many to peel off the block of Cells as
5179        * ours.  NOTE: This is different from the pb managed cell_count of the
5180        * 'cell' field above which is non-null when the cells are pb'd.
5181        * </pre>
5182        */
5183       public boolean hasAssociatedCellCount() {
5184         return ((bitField0_ & 0x00000002) == 0x00000002);
5185       }
5186       /**
5187        * <code>optional int32 associated_cell_count = 2;</code>
5188        *
5189        * <pre>
5190        * The below count is set when the associated cells are
5191        * not part of this protobuf message; they are passed alongside
5192        * and then this Message is just a placeholder with metadata.
5193        * The count is needed to know how many to peel off the block of Cells as
5194        * ours.  NOTE: This is different from the pb managed cell_count of the
5195        * 'cell' field above which is non-null when the cells are pb'd.
5196        * </pre>
5197        */
5198       public int getAssociatedCellCount() {
5199         return associatedCellCount_;
5200       }
5201       /**
5202        * <code>optional int32 associated_cell_count = 2;</code>
5203        *
5204        * <pre>
5205        * The below count is set when the associated cells are
5206        * not part of this protobuf message; they are passed alongside
5207        * and then this Message is just a placeholder with metadata.
5208        * The count is needed to know how many to peel off the block of Cells as
5209        * ours.  NOTE: This is different from the pb managed cell_count of the
5210        * 'cell' field above which is non-null when the cells are pb'd.
5211        * </pre>
5212        */
5213       public Builder setAssociatedCellCount(int value) {
5214         bitField0_ |= 0x00000002;
5215         associatedCellCount_ = value;
5216         onChanged();
5217         return this;
5218       }
5219       /**
5220        * <code>optional int32 associated_cell_count = 2;</code>
5221        *
5222        * <pre>
5223        * The below count is set when the associated cells are
5224        * not part of this protobuf message; they are passed alongside
5225        * and then this Message is just a placeholder with metadata.
5226        * The count is needed to know how many to peel off the block of Cells as
5227        * ours.  NOTE: This is different from the pb managed cell_count of the
5228        * 'cell' field above which is non-null when the cells are pb'd.
5229        * </pre>
5230        */
5231       public Builder clearAssociatedCellCount() {
5232         bitField0_ = (bitField0_ & ~0x00000002);
5233         associatedCellCount_ = 0;
5234         onChanged();
5235         return this;
5236       }
5237 
5238       // optional bool exists = 3;
5239       private boolean exists_ ;
5240       /**
5241        * <code>optional bool exists = 3;</code>
5242        *
5243        * <pre>
5244        * Used for Get to check existence only. Not set unless existence_only was set to true
5245        * in the query.
5246        * </pre>
5247        */
5248       public boolean hasExists() {
5249         return ((bitField0_ & 0x00000004) == 0x00000004);
5250       }
5251       /**
5252        * <code>optional bool exists = 3;</code>
5253        *
5254        * <pre>
5255        * Used for Get to check existence only. Not set unless existence_only was set to true
5256        * in the query.
5257        * </pre>
5258        */
5259       public boolean getExists() {
5260         return exists_;
5261       }
5262       /**
5263        * <code>optional bool exists = 3;</code>
5264        *
5265        * <pre>
5266        * Used for Get to check existence only. Not set unless existence_only was set to true
5267        * in the query.
5268        * </pre>
5269        */
5270       public Builder setExists(boolean value) {
5271         bitField0_ |= 0x00000004;
5272         exists_ = value;
5273         onChanged();
5274         return this;
5275       }
5276       /**
5277        * <code>optional bool exists = 3;</code>
5278        *
5279        * <pre>
5280        * Used for Get to check existence only. Not set unless existence_only was set to true
5281        * in the query.
5282        * </pre>
5283        */
5284       public Builder clearExists() {
5285         bitField0_ = (bitField0_ & ~0x00000004);
5286         exists_ = false;
5287         onChanged();
5288         return this;
5289       }
5290 
5291       // optional bool stale = 4 [default = false];
5292       private boolean stale_ ;
5293       /**
5294        * <code>optional bool stale = 4 [default = false];</code>
5295        *
5296        * <pre>
5297        * Whether or not the results are coming from possibly stale data 
5298        * </pre>
5299        */
5300       public boolean hasStale() {
5301         return ((bitField0_ & 0x00000008) == 0x00000008);
5302       }
5303       /**
5304        * <code>optional bool stale = 4 [default = false];</code>
5305        *
5306        * <pre>
5307        * Whether or not the results are coming from possibly stale data 
5308        * </pre>
5309        */
5310       public boolean getStale() {
5311         return stale_;
5312       }
5313       /**
5314        * <code>optional bool stale = 4 [default = false];</code>
5315        *
5316        * <pre>
5317        * Whether or not the results are coming from possibly stale data 
5318        * </pre>
5319        */
5320       public Builder setStale(boolean value) {
5321         bitField0_ |= 0x00000008;
5322         stale_ = value;
5323         onChanged();
5324         return this;
5325       }
5326       /**
5327        * <code>optional bool stale = 4 [default = false];</code>
5328        *
5329        * <pre>
5330        * Whether or not the results are coming from possibly stale data 
5331        * </pre>
5332        */
5333       public Builder clearStale() {
5334         bitField0_ = (bitField0_ & ~0x00000008);
5335         stale_ = false;
5336         onChanged();
5337         return this;
5338       }
5339 
5340       // optional bool partial = 5 [default = false];
5341       private boolean partial_ ;
5342       /**
5343        * <code>optional bool partial = 5 [default = false];</code>
5344        *
5345        * <pre>
5346        * Whether or not the entire result could be returned. Results will be split when
5347        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5348        * cells for a row and must be combined with a result containing the remaining cells
5349        * to form a complete result
5350        * </pre>
5351        */
5352       public boolean hasPartial() {
5353         return ((bitField0_ & 0x00000010) == 0x00000010);
5354       }
5355       /**
5356        * <code>optional bool partial = 5 [default = false];</code>
5357        *
5358        * <pre>
5359        * Whether or not the entire result could be returned. Results will be split when
5360        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5361        * cells for a row and must be combined with a result containing the remaining cells
5362        * to form a complete result
5363        * </pre>
5364        */
5365       public boolean getPartial() {
5366         return partial_;
5367       }
5368       /**
5369        * <code>optional bool partial = 5 [default = false];</code>
5370        *
5371        * <pre>
5372        * Whether or not the entire result could be returned. Results will be split when
5373        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5374        * cells for a row and must be combined with a result containing the remaining cells
5375        * to form a complete result
5376        * </pre>
5377        */
5378       public Builder setPartial(boolean value) {
5379         bitField0_ |= 0x00000010;
5380         partial_ = value;
5381         onChanged();
5382         return this;
5383       }
5384       /**
5385        * <code>optional bool partial = 5 [default = false];</code>
5386        *
5387        * <pre>
5388        * Whether or not the entire result could be returned. Results will be split when
5389        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5390        * cells for a row and must be combined with a result containing the remaining cells
5391        * to form a complete result
5392        * </pre>
5393        */
5394       public Builder clearPartial() {
5395         bitField0_ = (bitField0_ & ~0x00000010);
5396         partial_ = false;
5397         onChanged();
5398         return this;
5399       }
5400 
5401       // @@protoc_insertion_point(builder_scope:hbase.pb.Result)
5402     }
5403 
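         /*
          * Editorial sketch, not generated code: assembling a Result with the Builder
          * defined above. Only methods visible in this class are used; the Cell value
          * is a default instance purely for illustration.
          *
          *   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result =
          *       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder()
          *           .addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance())
          *           .setStale(false)
          *           .setPartial(false)
          *           .build();
          */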
5404     static {
5405       defaultInstance = new Result(true);
5406       defaultInstance.initFields();
5407     }
5408 
5409     // @@protoc_insertion_point(class_scope:hbase.pb.Result)
5410   }
5411 
5412   public interface GetRequestOrBuilder
5413       extends com.google.protobuf.MessageOrBuilder {
5414 
5415     // required .hbase.pb.RegionSpecifier region = 1;
5416     /**
5417      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5418      */
5419     boolean hasRegion();
5420     /**
5421      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5422      */
5423     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
5424     /**
5425      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5426      */
5427     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
5428 
5429     // required .hbase.pb.Get get = 2;
5430     /**
5431      * <code>required .hbase.pb.Get get = 2;</code>
5432      */
5433     boolean hasGet();
5434     /**
5435      * <code>required .hbase.pb.Get get = 2;</code>
5436      */
5437     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
5438     /**
5439      * <code>required .hbase.pb.Get get = 2;</code>
5440      */
5441     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
5442   }
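       /*
        * Editorial sketch, not generated code: reading the two required fields
        * through the accessors declared above. The request instance is an
        * assumption (e.g. decoded elsewhere with GetRequest.parseFrom). Because
        * both fields are required, isInitialized() on a GetRequest is false until
        * both are present.
        *
        *   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest req = ...;
        *   if (req.hasRegion() && req.hasGet()) {
        *     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region = req.getRegion();
        *     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get = req.getGet();
        *   }
        */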
5443   /**
5444    * Protobuf type {@code hbase.pb.GetRequest}
5445    *
5446    * <pre>
5447    **
5448    * The get request. Perform a single Get operation.
5449    * </pre>
5450    */
5451   public static final class GetRequest extends
5452       com.google.protobuf.GeneratedMessage
5453       implements GetRequestOrBuilder {
5454     // Use GetRequest.newBuilder() to construct.
5455     private GetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5456       super(builder);
5457       this.unknownFields = builder.getUnknownFields();
5458     }
5459     private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5460 
5461     private static final GetRequest defaultInstance;
5462     public static GetRequest getDefaultInstance() {
5463       return defaultInstance;
5464     }
5465 
5466     public GetRequest getDefaultInstanceForType() {
5467       return defaultInstance;
5468     }
5469 
5470     private final com.google.protobuf.UnknownFieldSet unknownFields;
5471     @java.lang.Override
5472     public final com.google.protobuf.UnknownFieldSet
5473         getUnknownFields() {
5474       return this.unknownFields;
5475     }
5476     private GetRequest(
5477         com.google.protobuf.CodedInputStream input,
5478         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5479         throws com.google.protobuf.InvalidProtocolBufferException {
5480       initFields();
5481       int mutable_bitField0_ = 0;
5482       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
5483           com.google.protobuf.UnknownFieldSet.newBuilder();
5484       try {
5485         boolean done = false;
5486         while (!done) {
5487           int tag = input.readTag();
5488           switch (tag) {
5489             case 0:
5490               done = true;
5491               break;
5492             default: {
5493               if (!parseUnknownField(input, unknownFields,
5494                                      extensionRegistry, tag)) {
5495                 done = true;
5496               }
5497               break;
5498             }
5499             case 10: {
5500               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
5501               if (((bitField0_ & 0x00000001) == 0x00000001)) {
5502                 subBuilder = region_.toBuilder();
5503               }
5504               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
5505               if (subBuilder != null) {
5506                 subBuilder.mergeFrom(region_);
5507                 region_ = subBuilder.buildPartial();
5508               }
5509               bitField0_ |= 0x00000001;
5510               break;
5511             }
5512             case 18: {
5513               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
5514               if (((bitField0_ & 0x00000002) == 0x00000002)) {
5515                 subBuilder = get_.toBuilder();
5516               }
5517               get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
5518               if (subBuilder != null) {
5519                 subBuilder.mergeFrom(get_);
5520                 get_ = subBuilder.buildPartial();
5521               }
5522               bitField0_ |= 0x00000002;
5523               break;
5524             }
5525           }
5526         }
5527       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5528         throw e.setUnfinishedMessage(this);
5529       } catch (java.io.IOException e) {
5530         throw new com.google.protobuf.InvalidProtocolBufferException(
5531             e.getMessage()).setUnfinishedMessage(this);
5532       } finally {
5533         this.unknownFields = unknownFields.build();
5534         makeExtensionsImmutable();
5535       }
5536     }
5537     public static final com.google.protobuf.Descriptors.Descriptor
5538         getDescriptor() {
5539       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
5540     }
5541 
5542     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5543         internalGetFieldAccessorTable() {
5544       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable
5545           .ensureFieldAccessorsInitialized(
5546               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
5547     }
5548 
5549     public static com.google.protobuf.Parser<GetRequest> PARSER =
5550         new com.google.protobuf.AbstractParser<GetRequest>() {
5551       public GetRequest parsePartialFrom(
5552           com.google.protobuf.CodedInputStream input,
5553           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5554           throws com.google.protobuf.InvalidProtocolBufferException {
5555         return new GetRequest(input, extensionRegistry);
5556       }
5557     };
5558 
5559     @java.lang.Override
5560     public com.google.protobuf.Parser<GetRequest> getParserForType() {
5561       return PARSER;
5562     }
5563 
5564     private int bitField0_;
5565     // required .hbase.pb.RegionSpecifier region = 1;
5566     public static final int REGION_FIELD_NUMBER = 1;
5567     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
5568     /**
5569      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5570      */
5571     public boolean hasRegion() {
5572       return ((bitField0_ & 0x00000001) == 0x00000001);
5573     }
5574     /**
5575      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5576      */
5577     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
5578       return region_;
5579     }
5580     /**
5581      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5582      */
5583     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
5584       return region_;
5585     }
5586 
5587     // required .hbase.pb.Get get = 2;
5588     public static final int GET_FIELD_NUMBER = 2;
5589     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
5590     /**
5591      * <code>required .hbase.pb.Get get = 2;</code>
5592      */
5593     public boolean hasGet() {
5594       return ((bitField0_ & 0x00000002) == 0x00000002);
5595     }
5596     /**
5597      * <code>required .hbase.pb.Get get = 2;</code>
5598      */
5599     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
5600       return get_;
5601     }
5602     /**
5603      * <code>required .hbase.pb.Get get = 2;</code>
5604      */
5605     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
5606       return get_;
5607     }
5608 
5609     private void initFields() {
5610       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5611       get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
5612     }
5613     private byte memoizedIsInitialized = -1;
5614     public final boolean isInitialized() {
5615       byte isInitialized = memoizedIsInitialized;
5616       if (isInitialized != -1) return isInitialized == 1;
5617 
5618       if (!hasRegion()) {
5619         memoizedIsInitialized = 0;
5620         return false;
5621       }
5622       if (!hasGet()) {
5623         memoizedIsInitialized = 0;
5624         return false;
5625       }
5626       if (!getRegion().isInitialized()) {
5627         memoizedIsInitialized = 0;
5628         return false;
5629       }
5630       if (!getGet().isInitialized()) {
5631         memoizedIsInitialized = 0;
5632         return false;
5633       }
5634       memoizedIsInitialized = 1;
5635       return true;
5636     }
5637 
5638     public void writeTo(com.google.protobuf.CodedOutputStream output)
5639                         throws java.io.IOException {
5640       getSerializedSize();
5641       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5642         output.writeMessage(1, region_);
5643       }
5644       if (((bitField0_ & 0x00000002) == 0x00000002)) {
5645         output.writeMessage(2, get_);
5646       }
5647       getUnknownFields().writeTo(output);
5648     }
5649 
5650     private int memoizedSerializedSize = -1;
5651     public int getSerializedSize() {
5652       int size = memoizedSerializedSize;
5653       if (size != -1) return size;
5654 
5655       size = 0;
5656       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5657         size += com.google.protobuf.CodedOutputStream
5658           .computeMessageSize(1, region_);
5659       }
5660       if (((bitField0_ & 0x00000002) == 0x00000002)) {
5661         size += com.google.protobuf.CodedOutputStream
5662           .computeMessageSize(2, get_);
5663       }
5664       size += getUnknownFields().getSerializedSize();
5665       memoizedSerializedSize = size;
5666       return size;
5667     }
5668 
5669     private static final long serialVersionUID = 0L;
5670     @java.lang.Override
5671     protected java.lang.Object writeReplace()
5672         throws java.io.ObjectStreamException {
5673       return super.writeReplace();
5674     }
5675 
5676     @java.lang.Override
5677     public boolean equals(final java.lang.Object obj) {
5678       if (obj == this) {
5679        return true;
5680       }
5681       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) {
5682         return super.equals(obj);
5683       }
5684       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj;
5685 
5686       boolean result = true;
5687       result = result && (hasRegion() == other.hasRegion());
5688       if (hasRegion()) {
5689         result = result && getRegion()
5690             .equals(other.getRegion());
5691       }
5692       result = result && (hasGet() == other.hasGet());
5693       if (hasGet()) {
5694         result = result && getGet()
5695             .equals(other.getGet());
5696       }
5697       result = result &&
5698           getUnknownFields().equals(other.getUnknownFields());
5699       return result;
5700     }
5701 
5702     private int memoizedHashCode = 0;
5703     @java.lang.Override
5704     public int hashCode() {
5705       if (memoizedHashCode != 0) {
5706         return memoizedHashCode;
5707       }
5708       int hash = 41;
5709       hash = (19 * hash) + getDescriptorForType().hashCode();
5710       if (hasRegion()) {
5711         hash = (37 * hash) + REGION_FIELD_NUMBER;
5712         hash = (53 * hash) + getRegion().hashCode();
5713       }
5714       if (hasGet()) {
5715         hash = (37 * hash) + GET_FIELD_NUMBER;
5716         hash = (53 * hash) + getGet().hashCode();
5717       }
5718       hash = (29 * hash) + getUnknownFields().hashCode();
5719       memoizedHashCode = hash;
5720       return hash;
5721     }
5722 
5723     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
5724         com.google.protobuf.ByteString data)
5725         throws com.google.protobuf.InvalidProtocolBufferException {
5726       return PARSER.parseFrom(data);
5727     }
5728     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
5729         com.google.protobuf.ByteString data,
5730         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5731         throws com.google.protobuf.InvalidProtocolBufferException {
5732       return PARSER.parseFrom(data, extensionRegistry);
5733     }
5734     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data)
5735         throws com.google.protobuf.InvalidProtocolBufferException {
5736       return PARSER.parseFrom(data);
5737     }
5738     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
5739         byte[] data,
5740         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5741         throws com.google.protobuf.InvalidProtocolBufferException {
5742       return PARSER.parseFrom(data, extensionRegistry);
5743     }
5744     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input)
5745         throws java.io.IOException {
5746       return PARSER.parseFrom(input);
5747     }
5748     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
5749         java.io.InputStream input,
5750         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5751         throws java.io.IOException {
5752       return PARSER.parseFrom(input, extensionRegistry);
5753     }
5754     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input)
5755         throws java.io.IOException {
5756       return PARSER.parseDelimitedFrom(input);
5757     }
5758     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(
5759         java.io.InputStream input,
5760         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5761         throws java.io.IOException {
5762       return PARSER.parseDelimitedFrom(input, extensionRegistry);
5763     }
5764     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
5765         com.google.protobuf.CodedInputStream input)
5766         throws java.io.IOException {
5767       return PARSER.parseFrom(input);
5768     }
5769     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
5770         com.google.protobuf.CodedInputStream input,
5771         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5772         throws java.io.IOException {
5773       return PARSER.parseFrom(input, extensionRegistry);
5774     }
5775 
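    // Usage sketch (illustrative only; the stream variables in and out are assumed
    // to exist elsewhere): the parseDelimitedFrom variants above pair with
    // writeDelimitedTo so that several requests can share one stream, each message
    // prefixed by its length.
    //
    //   request.writeDelimitedTo(out);                        // writer side
    //   ClientProtos.GetRequest next =
    //       ClientProtos.GetRequest.parseDelimitedFrom(in);   // reader side; null when the stream is exhausted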
5776     public static Builder newBuilder() { return Builder.create(); }
5777     public Builder newBuilderForType() { return newBuilder(); }
5778     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) {
5779       return newBuilder().mergeFrom(prototype);
5780     }
5781     public Builder toBuilder() { return newBuilder(this); }
5782 
5783     @java.lang.Override
5784     protected Builder newBuilderForType(
5785         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5786       Builder builder = new Builder(parent);
5787       return builder;
5788     }
5789     /**
5790      * Protobuf type {@code hbase.pb.GetRequest}
5791      *
5792      * <pre>
5793      **
5794      * The get request. Perform a single Get operation.
5795      * </pre>
5796      */
5797     public static final class Builder extends
5798         com.google.protobuf.GeneratedMessage.Builder<Builder>
5799        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder {
5800       public static final com.google.protobuf.Descriptors.Descriptor
5801           getDescriptor() {
5802         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
5803       }
5804 
5805       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5806           internalGetFieldAccessorTable() {
5807         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable
5808             .ensureFieldAccessorsInitialized(
5809                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
5810       }
5811 
5812       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder()
5813       private Builder() {
5814         maybeForceBuilderInitialization();
5815       }
5816 
5817       private Builder(
5818           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5819         super(parent);
5820         maybeForceBuilderInitialization();
5821       }
5822       private void maybeForceBuilderInitialization() {
5823         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
5824           getRegionFieldBuilder();
5825           getGetFieldBuilder();
5826         }
5827       }
5828       private static Builder create() {
5829         return new Builder();
5830       }
5831 
5832       public Builder clear() {
5833         super.clear();
5834         if (regionBuilder_ == null) {
5835           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5836         } else {
5837           regionBuilder_.clear();
5838         }
5839         bitField0_ = (bitField0_ & ~0x00000001);
5840         if (getBuilder_ == null) {
5841           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
5842         } else {
5843           getBuilder_.clear();
5844         }
5845         bitField0_ = (bitField0_ & ~0x00000002);
5846         return this;
5847       }
5848 
5849       public Builder clone() {
5850         return create().mergeFrom(buildPartial());
5851       }
5852 
5853       public com.google.protobuf.Descriptors.Descriptor
5854           getDescriptorForType() {
5855         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
5856       }
5857 
5858       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
5859         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
5860       }
5861 
5862       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() {
5863         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial();
5864         if (!result.isInitialized()) {
5865           throw newUninitializedMessageException(result);
5866         }
5867         return result;
5868       }
5869 
5870       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() {
5871         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this);
5872         int from_bitField0_ = bitField0_;
5873         int to_bitField0_ = 0;
5874         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
5875           to_bitField0_ |= 0x00000001;
5876         }
5877         if (regionBuilder_ == null) {
5878           result.region_ = region_;
5879         } else {
5880           result.region_ = regionBuilder_.build();
5881         }
5882         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
5883           to_bitField0_ |= 0x00000002;
5884         }
5885         if (getBuilder_ == null) {
5886           result.get_ = get_;
5887         } else {
5888           result.get_ = getBuilder_.build();
5889         }
5890         result.bitField0_ = to_bitField0_;
5891         onBuilt();
5892         return result;
5893       }
5894 
5895       public Builder mergeFrom(com.google.protobuf.Message other) {
5896         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) {
5897           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other);
5898         } else {
5899           super.mergeFrom(other);
5900           return this;
5901         }
5902       }
5903 
5904       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) {
5905         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this;
5906         if (other.hasRegion()) {
5907           mergeRegion(other.getRegion());
5908         }
5909         if (other.hasGet()) {
5910           mergeGet(other.getGet());
5911         }
5912         this.mergeUnknownFields(other.getUnknownFields());
5913         return this;
5914       }
5915 
5916       public final boolean isInitialized() {
5917         if (!hasRegion()) {
5918           
5919           return false;
5920         }
5921         if (!hasGet()) {
5922           
5923           return false;
5924         }
5925         if (!getRegion().isInitialized()) {
5926           
5927           return false;
5928         }
5929         if (!getGet().isInitialized()) {
5930           
5931           return false;
5932         }
5933         return true;
5934       }
5935 
5936       public Builder mergeFrom(
5937           com.google.protobuf.CodedInputStream input,
5938           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5939           throws java.io.IOException {
5940         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parsedMessage = null;
5941         try {
5942           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5943         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5944           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage();
5945           throw e;
5946         } finally {
5947           if (parsedMessage != null) {
5948             mergeFrom(parsedMessage);
5949           }
5950         }
5951         return this;
5952       }
5953       private int bitField0_;
5954 
5955       // required .hbase.pb.RegionSpecifier region = 1;
5956       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5957       private com.google.protobuf.SingleFieldBuilder<
5958           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
5959       /**
5960        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5961        */
5962       public boolean hasRegion() {
5963         return ((bitField0_ & 0x00000001) == 0x00000001);
5964       }
5965       /**
5966        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5967        */
5968       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
5969         if (regionBuilder_ == null) {
5970           return region_;
5971         } else {
5972           return regionBuilder_.getMessage();
5973         }
5974       }
5975       /**
5976        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5977        */
5978       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
5979         if (regionBuilder_ == null) {
5980           if (value == null) {
5981             throw new NullPointerException();
5982           }
5983           region_ = value;
5984           onChanged();
5985         } else {
5986           regionBuilder_.setMessage(value);
5987         }
5988         bitField0_ |= 0x00000001;
5989         return this;
5990       }
5991       /**
5992        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
5993        */
5994       public Builder setRegion(
5995           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
5996         if (regionBuilder_ == null) {
5997           region_ = builderForValue.build();
5998           onChanged();
5999         } else {
6000           regionBuilder_.setMessage(builderForValue.build());
6001         }
6002         bitField0_ |= 0x00000001;
6003         return this;
6004       }
6005       /**
6006        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
6007        */
6008       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
6009         if (regionBuilder_ == null) {
6010           if (((bitField0_ & 0x00000001) == 0x00000001) &&
6011               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
6012             region_ =
6013               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
6014           } else {
6015             region_ = value;
6016           }
6017           onChanged();
6018         } else {
6019           regionBuilder_.mergeFrom(value);
6020         }
6021         bitField0_ |= 0x00000001;
6022         return this;
6023       }
6024       /**
6025        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
6026        */
6027       public Builder clearRegion() {
6028         if (regionBuilder_ == null) {
6029           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6030           onChanged();
6031         } else {
6032           regionBuilder_.clear();
6033         }
6034         bitField0_ = (bitField0_ & ~0x00000001);
6035         return this;
6036       }
6037       /**
6038        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
6039        */
6040       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
6041         bitField0_ |= 0x00000001;
6042         onChanged();
6043         return getRegionFieldBuilder().getBuilder();
6044       }
6045       /**
6046        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
6047        */
6048       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
6049         if (regionBuilder_ != null) {
6050           return regionBuilder_.getMessageOrBuilder();
6051         } else {
6052           return region_;
6053         }
6054       }
6055       /**
6056        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
6057        */
6058       private com.google.protobuf.SingleFieldBuilder<
6059           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
6060           getRegionFieldBuilder() {
6061         if (regionBuilder_ == null) {
6062           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6063               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
6064                   region_,
6065                   getParentForChildren(),
6066                   isClean());
6067           region_ = null;
6068         }
6069         return regionBuilder_;
6070       }
6071 
6072       // required .hbase.pb.Get get = 2;
6073       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
6074       private com.google.protobuf.SingleFieldBuilder<
6075           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
6076       /**
6077        * <code>required .hbase.pb.Get get = 2;</code>
6078        */
6079       public boolean hasGet() {
6080         return ((bitField0_ & 0x00000002) == 0x00000002);
6081       }
6082       /**
6083        * <code>required .hbase.pb.Get get = 2;</code>
6084        */
6085       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
6086         if (getBuilder_ == null) {
6087           return get_;
6088         } else {
6089           return getBuilder_.getMessage();
6090         }
6091       }
6092       /**
6093        * <code>required .hbase.pb.Get get = 2;</code>
6094        */
6095       public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
6096         if (getBuilder_ == null) {
6097           if (value == null) {
6098             throw new NullPointerException();
6099           }
6100           get_ = value;
6101           onChanged();
6102         } else {
6103           getBuilder_.setMessage(value);
6104         }
6105         bitField0_ |= 0x00000002;
6106         return this;
6107       }
6108       /**
6109        * <code>required .hbase.pb.Get get = 2;</code>
6110        */
6111       public Builder setGet(
6112           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
6113         if (getBuilder_ == null) {
6114           get_ = builderForValue.build();
6115           onChanged();
6116         } else {
6117           getBuilder_.setMessage(builderForValue.build());
6118         }
6119         bitField0_ |= 0x00000002;
6120         return this;
6121       }
6122       /**
6123        * <code>required .hbase.pb.Get get = 2;</code>
6124        */
6125       public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
6126         if (getBuilder_ == null) {
6127           if (((bitField0_ & 0x00000002) == 0x00000002) &&
6128               get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
6129             get_ =
6130               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
6131           } else {
6132             get_ = value;
6133           }
6134           onChanged();
6135         } else {
6136           getBuilder_.mergeFrom(value);
6137         }
6138         bitField0_ |= 0x00000002;
6139         return this;
6140       }
6141       /**
6142        * <code>required .hbase.pb.Get get = 2;</code>
6143        */
6144       public Builder clearGet() {
6145         if (getBuilder_ == null) {
6146           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
6147           onChanged();
6148         } else {
6149           getBuilder_.clear();
6150         }
6151         bitField0_ = (bitField0_ & ~0x00000002);
6152         return this;
6153       }
6154       /**
6155        * <code>required .hbase.pb.Get get = 2;</code>
6156        */
6157       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
6158         bitField0_ |= 0x00000002;
6159         onChanged();
6160         return getGetFieldBuilder().getBuilder();
6161       }
6162       /**
6163        * <code>required .hbase.pb.Get get = 2;</code>
6164        */
6165       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
6166         if (getBuilder_ != null) {
6167           return getBuilder_.getMessageOrBuilder();
6168         } else {
6169           return get_;
6170         }
6171       }
6172       /**
6173        * <code>required .hbase.pb.Get get = 2;</code>
6174        */
6175       private com.google.protobuf.SingleFieldBuilder<
6176           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> 
6177           getGetFieldBuilder() {
6178         if (getBuilder_ == null) {
6179           getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6180               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
6181                   get_,
6182                   getParentForChildren(),
6183                   isClean());
6184           get_ = null;
6185         }
6186         return getBuilder_;
6187       }
6188 
6189       // @@protoc_insertion_point(builder_scope:hbase.pb.GetRequest)
6190     }
6191 
6192     static {
6193       defaultInstance = new GetRequest(true);
6194       defaultInstance.initFields();
6195     }
6196 
6197     // @@protoc_insertion_point(class_scope:hbase.pb.GetRequest)
6198   }
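  // Usage sketch (illustrative only; regionSpecifier and get are assumed to be
  // messages built elsewhere): one way a caller might assemble a GetRequest with
  // the builder API generated above. Both fields are required, so build() throws
  // if either is left unset.
  //
  //   ClientProtos.GetRequest request = ClientProtos.GetRequest.newBuilder()
  //       .setRegion(regionSpecifier)   // required .hbase.pb.RegionSpecifier region = 1
  //       .setGet(get)                  // required .hbase.pb.Get get = 2
  //       .build();
  //   byte[] wire = request.toByteArray();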
6199 
6200   public interface GetResponseOrBuilder
6201       extends com.google.protobuf.MessageOrBuilder {
6202 
6203     // optional .hbase.pb.Result result = 1;
6204     /**
6205      * <code>optional .hbase.pb.Result result = 1;</code>
6206      */
6207     boolean hasResult();
6208     /**
6209      * <code>optional .hbase.pb.Result result = 1;</code>
6210      */
6211     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
6212     /**
6213      * <code>optional .hbase.pb.Result result = 1;</code>
6214      */
6215     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
6216   }
6217   /**
6218    * Protobuf type {@code hbase.pb.GetResponse}
6219    */
6220   public static final class GetResponse extends
6221       com.google.protobuf.GeneratedMessage
6222       implements GetResponseOrBuilder {
6223     // Use GetResponse.newBuilder() to construct.
6224     private GetResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6225       super(builder);
6226       this.unknownFields = builder.getUnknownFields();
6227     }
6228     private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6229 
6230     private static final GetResponse defaultInstance;
6231     public static GetResponse getDefaultInstance() {
6232       return defaultInstance;
6233     }
6234 
6235     public GetResponse getDefaultInstanceForType() {
6236       return defaultInstance;
6237     }
6238 
6239     private final com.google.protobuf.UnknownFieldSet unknownFields;
6240     @java.lang.Override
6241     public final com.google.protobuf.UnknownFieldSet
6242         getUnknownFields() {
6243       return this.unknownFields;
6244     }
6245     private GetResponse(
6246         com.google.protobuf.CodedInputStream input,
6247         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6248         throws com.google.protobuf.InvalidProtocolBufferException {
6249       initFields();
6250       int mutable_bitField0_ = 0;
6251       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6252           com.google.protobuf.UnknownFieldSet.newBuilder();
6253       try {
6254         boolean done = false;
6255         while (!done) {
6256           int tag = input.readTag();
6257           switch (tag) {
6258             case 0:
6259               done = true;
6260               break;
6261             default: {
6262               if (!parseUnknownField(input, unknownFields,
6263                                      extensionRegistry, tag)) {
6264                 done = true;
6265               }
6266               break;
6267             }
6268             case 10: {
6269               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
6270               if (((bitField0_ & 0x00000001) == 0x00000001)) {
6271                 subBuilder = result_.toBuilder();
6272               }
6273               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
6274               if (subBuilder != null) {
6275                 subBuilder.mergeFrom(result_);
6276                 result_ = subBuilder.buildPartial();
6277               }
6278               bitField0_ |= 0x00000001;
6279               break;
6280             }
6281           }
6282         }
6283       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6284         throw e.setUnfinishedMessage(this);
6285       } catch (java.io.IOException e) {
6286         throw new com.google.protobuf.InvalidProtocolBufferException(
6287             e.getMessage()).setUnfinishedMessage(this);
6288       } finally {
6289         this.unknownFields = unknownFields.build();
6290         makeExtensionsImmutable();
6291       }
6292     }
6293     public static final com.google.protobuf.Descriptors.Descriptor
6294         getDescriptor() {
6295       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
6296     }
6297 
6298     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6299         internalGetFieldAccessorTable() {
6300       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable
6301           .ensureFieldAccessorsInitialized(
6302               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
6303     }
6304 
6305     public static com.google.protobuf.Parser<GetResponse> PARSER =
6306         new com.google.protobuf.AbstractParser<GetResponse>() {
6307       public GetResponse parsePartialFrom(
6308           com.google.protobuf.CodedInputStream input,
6309           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6310           throws com.google.protobuf.InvalidProtocolBufferException {
6311         return new GetResponse(input, extensionRegistry);
6312       }
6313     };
6314 
6315     @java.lang.Override
6316     public com.google.protobuf.Parser<GetResponse> getParserForType() {
6317       return PARSER;
6318     }
6319 
6320     private int bitField0_;
6321     // optional .hbase.pb.Result result = 1;
6322     public static final int RESULT_FIELD_NUMBER = 1;
6323     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
6324     /**
6325      * <code>optional .hbase.pb.Result result = 1;</code>
6326      */
6327     public boolean hasResult() {
6328       return ((bitField0_ & 0x00000001) == 0x00000001);
6329     }
6330     /**
6331      * <code>optional .hbase.pb.Result result = 1;</code>
6332      */
6333     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
6334       return result_;
6335     }
6336     /**
6337      * <code>optional .hbase.pb.Result result = 1;</code>
6338      */
6339     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
6340       return result_;
6341     }
6342 
6343     private void initFields() {
6344       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
6345     }
6346     private byte memoizedIsInitialized = -1;
6347     public final boolean isInitialized() {
6348       byte isInitialized = memoizedIsInitialized;
6349       if (isInitialized != -1) return isInitialized == 1;
6350 
6351       memoizedIsInitialized = 1;
6352       return true;
6353     }
6354 
6355     public void writeTo(com.google.protobuf.CodedOutputStream output)
6356                         throws java.io.IOException {
6357       getSerializedSize();
6358       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6359         output.writeMessage(1, result_);
6360       }
6361       getUnknownFields().writeTo(output);
6362     }
6363 
6364     private int memoizedSerializedSize = -1;
6365     public int getSerializedSize() {
6366       int size = memoizedSerializedSize;
6367       if (size != -1) return size;
6368 
6369       size = 0;
6370       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6371         size += com.google.protobuf.CodedOutputStream
6372           .computeMessageSize(1, result_);
6373       }
6374       size += getUnknownFields().getSerializedSize();
6375       memoizedSerializedSize = size;
6376       return size;
6377     }
6378 
6379     private static final long serialVersionUID = 0L;
6380     @java.lang.Override
6381     protected java.lang.Object writeReplace()
6382         throws java.io.ObjectStreamException {
6383       return super.writeReplace();
6384     }
6385 
6386     @java.lang.Override
6387     public boolean equals(final java.lang.Object obj) {
6388       if (obj == this) {
6389        return true;
6390       }
6391       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) {
6392         return super.equals(obj);
6393       }
6394       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj;
6395 
6396       boolean result = true;
6397       result = result && (hasResult() == other.hasResult());
6398       if (hasResult()) {
6399         result = result && getResult()
6400             .equals(other.getResult());
6401       }
6402       result = result &&
6403           getUnknownFields().equals(other.getUnknownFields());
6404       return result;
6405     }
6406 
6407     private int memoizedHashCode = 0;
6408     @java.lang.Override
6409     public int hashCode() {
6410       if (memoizedHashCode != 0) {
6411         return memoizedHashCode;
6412       }
6413       int hash = 41;
6414       hash = (19 * hash) + getDescriptorForType().hashCode();
6415       if (hasResult()) {
6416         hash = (37 * hash) + RESULT_FIELD_NUMBER;
6417         hash = (53 * hash) + getResult().hashCode();
6418       }
6419       hash = (29 * hash) + getUnknownFields().hashCode();
6420       memoizedHashCode = hash;
6421       return hash;
6422     }
6423 
6424     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6425         com.google.protobuf.ByteString data)
6426         throws com.google.protobuf.InvalidProtocolBufferException {
6427       return PARSER.parseFrom(data);
6428     }
6429     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6430         com.google.protobuf.ByteString data,
6431         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6432         throws com.google.protobuf.InvalidProtocolBufferException {
6433       return PARSER.parseFrom(data, extensionRegistry);
6434     }
6435     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data)
6436         throws com.google.protobuf.InvalidProtocolBufferException {
6437       return PARSER.parseFrom(data);
6438     }
6439     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6440         byte[] data,
6441         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6442         throws com.google.protobuf.InvalidProtocolBufferException {
6443       return PARSER.parseFrom(data, extensionRegistry);
6444     }
6445     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input)
6446         throws java.io.IOException {
6447       return PARSER.parseFrom(input);
6448     }
6449     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6450         java.io.InputStream input,
6451         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6452         throws java.io.IOException {
6453       return PARSER.parseFrom(input, extensionRegistry);
6454     }
6455     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input)
6456         throws java.io.IOException {
6457       return PARSER.parseDelimitedFrom(input);
6458     }
6459     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(
6460         java.io.InputStream input,
6461         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6462         throws java.io.IOException {
6463       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6464     }
6465     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6466         com.google.protobuf.CodedInputStream input)
6467         throws java.io.IOException {
6468       return PARSER.parseFrom(input);
6469     }
6470     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6471         com.google.protobuf.CodedInputStream input,
6472         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6473         throws java.io.IOException {
6474       return PARSER.parseFrom(input, extensionRegistry);
6475     }
6476 
6477     public static Builder newBuilder() { return Builder.create(); }
6478     public Builder newBuilderForType() { return newBuilder(); }
6479     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) {
6480       return newBuilder().mergeFrom(prototype);
6481     }
6482     public Builder toBuilder() { return newBuilder(this); }
6483 
6484     @java.lang.Override
6485     protected Builder newBuilderForType(
6486         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6487       Builder builder = new Builder(parent);
6488       return builder;
6489     }
6490     /**
6491      * Protobuf type {@code hbase.pb.GetResponse}
6492      */
6493     public static final class Builder extends
6494         com.google.protobuf.GeneratedMessage.Builder<Builder>
6495        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder {
6496       public static final com.google.protobuf.Descriptors.Descriptor
6497           getDescriptor() {
6498         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
6499       }
6500 
6501       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6502           internalGetFieldAccessorTable() {
6503         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable
6504             .ensureFieldAccessorsInitialized(
6505                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
6506       }
6507 
6508       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder()
6509       private Builder() {
6510         maybeForceBuilderInitialization();
6511       }
6512 
6513       private Builder(
6514           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6515         super(parent);
6516         maybeForceBuilderInitialization();
6517       }
6518       private void maybeForceBuilderInitialization() {
6519         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6520           getResultFieldBuilder();
6521         }
6522       }
6523       private static Builder create() {
6524         return new Builder();
6525       }
6526 
6527       public Builder clear() {
6528         super.clear();
6529         if (resultBuilder_ == null) {
6530           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
6531         } else {
6532           resultBuilder_.clear();
6533         }
6534         bitField0_ = (bitField0_ & ~0x00000001);
6535         return this;
6536       }
6537 
6538       public Builder clone() {
6539         return create().mergeFrom(buildPartial());
6540       }
6541 
6542       public com.google.protobuf.Descriptors.Descriptor
6543           getDescriptorForType() {
6544         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
6545       }
6546 
6547       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() {
6548         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
6549       }
6550 
6551       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() {
6552         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial();
6553         if (!result.isInitialized()) {
6554           throw newUninitializedMessageException(result);
6555         }
6556         return result;
6557       }
6558 
6559       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() {
6560         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this);
6561         int from_bitField0_ = bitField0_;
6562         int to_bitField0_ = 0;
6563         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6564           to_bitField0_ |= 0x00000001;
6565         }
6566         if (resultBuilder_ == null) {
6567           result.result_ = result_;
6568         } else {
6569           result.result_ = resultBuilder_.build();
6570         }
6571         result.bitField0_ = to_bitField0_;
6572         onBuilt();
6573         return result;
6574       }
6575 
6576       public Builder mergeFrom(com.google.protobuf.Message other) {
6577         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) {
6578           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other);
6579         } else {
6580           super.mergeFrom(other);
6581           return this;
6582         }
6583       }
6584 
6585       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) {
6586         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this;
6587         if (other.hasResult()) {
6588           mergeResult(other.getResult());
6589         }
6590         this.mergeUnknownFields(other.getUnknownFields());
6591         return this;
6592       }
6593 
6594       public final boolean isInitialized() {
6595         return true;
6596       }
6597 
6598       public Builder mergeFrom(
6599           com.google.protobuf.CodedInputStream input,
6600           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6601           throws java.io.IOException {
6602         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parsedMessage = null;
6603         try {
6604           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6605         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6606           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage();
6607           throw e;
6608         } finally {
6609           if (parsedMessage != null) {
6610             mergeFrom(parsedMessage);
6611           }
6612         }
6613         return this;
6614       }
6615       private int bitField0_;
6616 
6617       // optional .hbase.pb.Result result = 1;
6618       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
6619       private com.google.protobuf.SingleFieldBuilder<
6620           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
6621       /**
6622        * <code>optional .hbase.pb.Result result = 1;</code>
6623        */
6624       public boolean hasResult() {
6625         return ((bitField0_ & 0x00000001) == 0x00000001);
6626       }
6627       /**
6628        * <code>optional .hbase.pb.Result result = 1;</code>
6629        */
6630       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
6631         if (resultBuilder_ == null) {
6632           return result_;
6633         } else {
6634           return resultBuilder_.getMessage();
6635         }
6636       }
6637       /**
6638        * <code>optional .hbase.pb.Result result = 1;</code>
6639        */
6640       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
6641         if (resultBuilder_ == null) {
6642           if (value == null) {
6643             throw new NullPointerException();
6644           }
6645           result_ = value;
6646           onChanged();
6647         } else {
6648           resultBuilder_.setMessage(value);
6649         }
6650         bitField0_ |= 0x00000001;
6651         return this;
6652       }
6653       /**
6654        * <code>optional .hbase.pb.Result result = 1;</code>
6655        */
6656       public Builder setResult(
6657           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
6658         if (resultBuilder_ == null) {
6659           result_ = builderForValue.build();
6660           onChanged();
6661         } else {
6662           resultBuilder_.setMessage(builderForValue.build());
6663         }
6664         bitField0_ |= 0x00000001;
6665         return this;
6666       }
6667       /**
6668        * <code>optional .hbase.pb.Result result = 1;</code>
6669        */
6670       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
6671         if (resultBuilder_ == null) {
6672           if (((bitField0_ & 0x00000001) == 0x00000001) &&
6673               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
6674             result_ =
6675               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
6676           } else {
6677             result_ = value;
6678           }
6679           onChanged();
6680         } else {
6681           resultBuilder_.mergeFrom(value);
6682         }
6683         bitField0_ |= 0x00000001;
6684         return this;
6685       }
6686       /**
6687        * <code>optional .hbase.pb.Result result = 1;</code>
6688        */
6689       public Builder clearResult() {
6690         if (resultBuilder_ == null) {
6691           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
6692           onChanged();
6693         } else {
6694           resultBuilder_.clear();
6695         }
6696         bitField0_ = (bitField0_ & ~0x00000001);
6697         return this;
6698       }
6699       /**
6700        * <code>optional .hbase.pb.Result result = 1;</code>
6701        */
6702       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
6703         bitField0_ |= 0x00000001;
6704         onChanged();
6705         return getResultFieldBuilder().getBuilder();
6706       }
6707       /**
6708        * <code>optional .hbase.pb.Result result = 1;</code>
6709        */
6710       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
6711         if (resultBuilder_ != null) {
6712           return resultBuilder_.getMessageOrBuilder();
6713         } else {
6714           return result_;
6715         }
6716       }
6717       /**
6718        * <code>optional .hbase.pb.Result result = 1;</code>
6719        */
6720       private com.google.protobuf.SingleFieldBuilder<
6721           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
6722           getResultFieldBuilder() {
6723         if (resultBuilder_ == null) {
6724           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6725               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
6726                   result_,
6727                   getParentForChildren(),
6728                   isClean());
6729           result_ = null;
6730         }
6731         return resultBuilder_;
6732       }
6733 
6734       // @@protoc_insertion_point(builder_scope:hbase.pb.GetResponse)
6735     }
6736 
6737     static {
6738       defaultInstance = new GetResponse(true);
6739       defaultInstance.initFields();
6740     }
6741 
6742     // @@protoc_insertion_point(class_scope:hbase.pb.GetResponse)
6743   }
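  // Usage sketch (illustrative only; wire is assumed to hold a serialized
  // GetResponse): reading a response back with the parseFrom and accessor methods
  // above. Because the result field is optional, hasResult() is checked before
  // getResult() is used.
  //
  //   ClientProtos.GetResponse response = ClientProtos.GetResponse.parseFrom(wire);
  //   if (response.hasResult()) {
  //     ClientProtos.Result result = response.getResult();
  //     // inspect the returned cells here
  //   }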
6744 
6745   public interface ConditionOrBuilder
6746       extends com.google.protobuf.MessageOrBuilder {
6747 
6748     // required bytes row = 1;
6749     /**
6750      * <code>required bytes row = 1;</code>
6751      */
6752     boolean hasRow();
6753     /**
6754      * <code>required bytes row = 1;</code>
6755      */
6756     com.google.protobuf.ByteString getRow();
6757 
6758     // required bytes family = 2;
6759     /**
6760      * <code>required bytes family = 2;</code>
6761      */
6762     boolean hasFamily();
6763     /**
6764      * <code>required bytes family = 2;</code>
6765      */
6766     com.google.protobuf.ByteString getFamily();
6767 
6768     // required bytes qualifier = 3;
6769     /**
6770      * <code>required bytes qualifier = 3;</code>
6771      */
6772     boolean hasQualifier();
6773     /**
6774      * <code>required bytes qualifier = 3;</code>
6775      */
6776     com.google.protobuf.ByteString getQualifier();
6777 
6778     // required .hbase.pb.CompareType compare_type = 4;
6779     /**
6780      * <code>required .hbase.pb.CompareType compare_type = 4;</code>
6781      */
6782     boolean hasCompareType();
6783     /**
6784      * <code>required .hbase.pb.CompareType compare_type = 4;</code>
6785      */
6786     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType();
6787 
6788     // required .hbase.pb.Comparator comparator = 5;
6789     /**
6790      * <code>required .hbase.pb.Comparator comparator = 5;</code>
6791      */
6792     boolean hasComparator();
6793     /**
6794      * <code>required .hbase.pb.Comparator comparator = 5;</code>
6795      */
6796     org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
6797     /**
6798      * <code>required .hbase.pb.Comparator comparator = 5;</code>
6799      */
6800     org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
6801   }
6802   /**
6803    * Protobuf type {@code hbase.pb.Condition}
6804    *
6805    * <pre>
6806    **
6807    * Condition to check if the value of a given cell (row,
6808    * family, qualifier) matches a value via a given comparator.
6809    *
6810    * Condition is used in check and mutate operations.
6811    * </pre>
6812    */
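  // Usage sketch (illustrative only): Condition follows the same generated builder
  // pattern as GetRequest above, so a check-and-mutate condition might be assembled
  // roughly as below. The ByteString literals, the EQUAL compare type, and the
  // comparator variable are assumptions used purely for illustration.
  //
  //   ClientProtos.Condition condition = ClientProtos.Condition.newBuilder()
  //       .setRow(ByteString.copyFromUtf8("row1"))
  //       .setFamily(ByteString.copyFromUtf8("cf"))
  //       .setQualifier(ByteString.copyFromUtf8("col"))
  //       .setCompareType(HBaseProtos.CompareType.EQUAL)
  //       .setComparator(comparator)   // a ComparatorProtos.Comparator built elsewhere
  //       .build();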
6813   public static final class Condition extends
6814       com.google.protobuf.GeneratedMessage
6815       implements ConditionOrBuilder {
6816     // Use Condition.newBuilder() to construct.
6817     private Condition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6818       super(builder);
6819       this.unknownFields = builder.getUnknownFields();
6820     }
6821     private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6822 
6823     private static final Condition defaultInstance;
6824     public static Condition getDefaultInstance() {
6825       return defaultInstance;
6826     }
6827 
6828     public Condition getDefaultInstanceForType() {
6829       return defaultInstance;
6830     }
6831 
6832     private final com.google.protobuf.UnknownFieldSet unknownFields;
6833     @java.lang.Override
6834     public final com.google.protobuf.UnknownFieldSet
6835         getUnknownFields() {
6836       return this.unknownFields;
6837     }
6838     private Condition(
6839         com.google.protobuf.CodedInputStream input,
6840         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6841         throws com.google.protobuf.InvalidProtocolBufferException {
6842       initFields();
6843       int mutable_bitField0_ = 0;
6844       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6845           com.google.protobuf.UnknownFieldSet.newBuilder();
6846       try {
6847         boolean done = false;
6848         while (!done) {
6849           int tag = input.readTag();
6850           switch (tag) {
6851             case 0:
6852               done = true;
6853               break;
6854             default: {
6855               if (!parseUnknownField(input, unknownFields,
6856                                      extensionRegistry, tag)) {
6857                 done = true;
6858               }
6859               break;
6860             }
6861             case 10: {
6862               bitField0_ |= 0x00000001;
6863               row_ = input.readBytes();
6864               break;
6865             }
6866             case 18: {
6867               bitField0_ |= 0x00000002;
6868               family_ = input.readBytes();
6869               break;
6870             }
6871             case 26: {
6872               bitField0_ |= 0x00000004;
6873               qualifier_ = input.readBytes();
6874               break;
6875             }
6876             case 32: {
6877               int rawValue = input.readEnum();
6878               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
6879               if (value == null) {
6880                 unknownFields.mergeVarintField(4, rawValue);
6881               } else {
6882                 bitField0_ |= 0x00000008;
6883                 compareType_ = value;
6884               }
6885               break;
6886             }
6887             case 42: {
6888               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
6889               if (((bitField0_ & 0x00000010) == 0x00000010)) {
6890                 subBuilder = comparator_.toBuilder();
6891               }
6892               comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
6893               if (subBuilder != null) {
6894                 subBuilder.mergeFrom(comparator_);
6895                 comparator_ = subBuilder.buildPartial();
6896               }
6897               bitField0_ |= 0x00000010;
6898               break;
6899             }
6900           }
6901         }
6902       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6903         throw e.setUnfinishedMessage(this);
6904       } catch (java.io.IOException e) {
6905         throw new com.google.protobuf.InvalidProtocolBufferException(
6906             e.getMessage()).setUnfinishedMessage(this);
6907       } finally {
6908         this.unknownFields = unknownFields.build();
6909         makeExtensionsImmutable();
6910       }
6911     }
6912     public static final com.google.protobuf.Descriptors.Descriptor
6913         getDescriptor() {
6914       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
6915     }
6916 
6917     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6918         internalGetFieldAccessorTable() {
6919       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable
6920           .ensureFieldAccessorsInitialized(
6921               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
6922     }
6923 
6924     public static com.google.protobuf.Parser<Condition> PARSER =
6925         new com.google.protobuf.AbstractParser<Condition>() {
6926       public Condition parsePartialFrom(
6927           com.google.protobuf.CodedInputStream input,
6928           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6929           throws com.google.protobuf.InvalidProtocolBufferException {
6930         return new Condition(input, extensionRegistry);
6931       }
6932     };
6933 
6934     @java.lang.Override
6935     public com.google.protobuf.Parser<Condition> getParserForType() {
6936       return PARSER;
6937     }
6938 
6939     private int bitField0_;
6940     // required bytes row = 1;
6941     public static final int ROW_FIELD_NUMBER = 1;
6942     private com.google.protobuf.ByteString row_;
6943     /**
6944      * <code>required bytes row = 1;</code>
6945      */
6946     public boolean hasRow() {
6947       return ((bitField0_ & 0x00000001) == 0x00000001);
6948     }
6949     /**
6950      * <code>required bytes row = 1;</code>
6951      */
6952     public com.google.protobuf.ByteString getRow() {
6953       return row_;
6954     }
6955 
6956     // required bytes family = 2;
6957     public static final int FAMILY_FIELD_NUMBER = 2;
6958     private com.google.protobuf.ByteString family_;
6959     /**
6960      * <code>required bytes family = 2;</code>
6961      */
6962     public boolean hasFamily() {
6963       return ((bitField0_ & 0x00000002) == 0x00000002);
6964     }
6965     /**
6966      * <code>required bytes family = 2;</code>
6967      */
6968     public com.google.protobuf.ByteString getFamily() {
6969       return family_;
6970     }
6971 
6972     // required bytes qualifier = 3;
6973     public static final int QUALIFIER_FIELD_NUMBER = 3;
6974     private com.google.protobuf.ByteString qualifier_;
6975     /**
6976      * <code>required bytes qualifier = 3;</code>
6977      */
6978     public boolean hasQualifier() {
6979       return ((bitField0_ & 0x00000004) == 0x00000004);
6980     }
6981     /**
6982      * <code>required bytes qualifier = 3;</code>
6983      */
6984     public com.google.protobuf.ByteString getQualifier() {
6985       return qualifier_;
6986     }
6987 
6988     // required .hbase.pb.CompareType compare_type = 4;
6989     public static final int COMPARE_TYPE_FIELD_NUMBER = 4;
6990     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_;
6991     /**
6992      * <code>required .hbase.pb.CompareType compare_type = 4;</code>
6993      */
6994     public boolean hasCompareType() {
6995       return ((bitField0_ & 0x00000008) == 0x00000008);
6996     }
6997     /**
6998      * <code>required .hbase.pb.CompareType compare_type = 4;</code>
6999      */
7000     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
7001       return compareType_;
7002     }
7003 
7004     // required .hbase.pb.Comparator comparator = 5;
7005     public static final int COMPARATOR_FIELD_NUMBER = 5;
7006     private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
7007     /**
7008      * <code>required .hbase.pb.Comparator comparator = 5;</code>
7009      */
7010     public boolean hasComparator() {
7011       return ((bitField0_ & 0x00000010) == 0x00000010);
7012     }
7013     /**
7014      * <code>required .hbase.pb.Comparator comparator = 5;</code>
7015      */
7016     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
7017       return comparator_;
7018     }
7019     /**
7020      * <code>required .hbase.pb.Comparator comparator = 5;</code>
7021      */
7022     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
7023       return comparator_;
7024     }
7025 
7026     private void initFields() {
7027       row_ = com.google.protobuf.ByteString.EMPTY;
7028       family_ = com.google.protobuf.ByteString.EMPTY;
7029       qualifier_ = com.google.protobuf.ByteString.EMPTY;
7030       compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
7031       comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
7032     }
7033     private byte memoizedIsInitialized = -1;
7034     public final boolean isInitialized() {
7035       byte isInitialized = memoizedIsInitialized;
7036       if (isInitialized != -1) return isInitialized == 1;
7037 
7038       if (!hasRow()) {
7039         memoizedIsInitialized = 0;
7040         return false;
7041       }
7042       if (!hasFamily()) {
7043         memoizedIsInitialized = 0;
7044         return false;
7045       }
7046       if (!hasQualifier()) {
7047         memoizedIsInitialized = 0;
7048         return false;
7049       }
7050       if (!hasCompareType()) {
7051         memoizedIsInitialized = 0;
7052         return false;
7053       }
7054       if (!hasComparator()) {
7055         memoizedIsInitialized = 0;
7056         return false;
7057       }
7058       if (!getComparator().isInitialized()) {
7059         memoizedIsInitialized = 0;
7060         return false;
7061       }
7062       memoizedIsInitialized = 1;
7063       return true;
7064     }
7065 
7066     public void writeTo(com.google.protobuf.CodedOutputStream output)
7067                         throws java.io.IOException {
7068       getSerializedSize();
7069       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7070         output.writeBytes(1, row_);
7071       }
7072       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7073         output.writeBytes(2, family_);
7074       }
7075       if (((bitField0_ & 0x00000004) == 0x00000004)) {
7076         output.writeBytes(3, qualifier_);
7077       }
7078       if (((bitField0_ & 0x00000008) == 0x00000008)) {
7079         output.writeEnum(4, compareType_.getNumber());
7080       }
7081       if (((bitField0_ & 0x00000010) == 0x00000010)) {
7082         output.writeMessage(5, comparator_);
7083       }
7084       getUnknownFields().writeTo(output);
7085     }
7086 
7087     private int memoizedSerializedSize = -1;
7088     public int getSerializedSize() {
7089       int size = memoizedSerializedSize;
7090       if (size != -1) return size;
7091 
7092       size = 0;
7093       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7094         size += com.google.protobuf.CodedOutputStream
7095           .computeBytesSize(1, row_);
7096       }
7097       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7098         size += com.google.protobuf.CodedOutputStream
7099           .computeBytesSize(2, family_);
7100       }
7101       if (((bitField0_ & 0x00000004) == 0x00000004)) {
7102         size += com.google.protobuf.CodedOutputStream
7103           .computeBytesSize(3, qualifier_);
7104       }
7105       if (((bitField0_ & 0x00000008) == 0x00000008)) {
7106         size += com.google.protobuf.CodedOutputStream
7107           .computeEnumSize(4, compareType_.getNumber());
7108       }
7109       if (((bitField0_ & 0x00000010) == 0x00000010)) {
7110         size += com.google.protobuf.CodedOutputStream
7111           .computeMessageSize(5, comparator_);
7112       }
7113       size += getUnknownFields().getSerializedSize();
7114       memoizedSerializedSize = size;
7115       return size;
7116     }
7117 
7118     private static final long serialVersionUID = 0L;
7119     @java.lang.Override
7120     protected java.lang.Object writeReplace()
7121         throws java.io.ObjectStreamException {
7122       return super.writeReplace();
7123     }
7124 
7125     @java.lang.Override
7126     public boolean equals(final java.lang.Object obj) {
7127       if (obj == this) {
7128        return true;
7129       }
7130       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) {
7131         return super.equals(obj);
7132       }
7133       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj;
7134 
7135       boolean result = true;
7136       result = result && (hasRow() == other.hasRow());
7137       if (hasRow()) {
7138         result = result && getRow()
7139             .equals(other.getRow());
7140       }
7141       result = result && (hasFamily() == other.hasFamily());
7142       if (hasFamily()) {
7143         result = result && getFamily()
7144             .equals(other.getFamily());
7145       }
7146       result = result && (hasQualifier() == other.hasQualifier());
7147       if (hasQualifier()) {
7148         result = result && getQualifier()
7149             .equals(other.getQualifier());
7150       }
7151       result = result && (hasCompareType() == other.hasCompareType());
7152       if (hasCompareType()) {
7153         result = result &&
7154             (getCompareType() == other.getCompareType());
7155       }
7156       result = result && (hasComparator() == other.hasComparator());
7157       if (hasComparator()) {
7158         result = result && getComparator()
7159             .equals(other.getComparator());
7160       }
7161       result = result &&
7162           getUnknownFields().equals(other.getUnknownFields());
7163       return result;
7164     }
7165 
7166     private int memoizedHashCode = 0;
7167     @java.lang.Override
7168     public int hashCode() {
7169       if (memoizedHashCode != 0) {
7170         return memoizedHashCode;
7171       }
7172       int hash = 41;
7173       hash = (19 * hash) + getDescriptorForType().hashCode();
7174       if (hasRow()) {
7175         hash = (37 * hash) + ROW_FIELD_NUMBER;
7176         hash = (53 * hash) + getRow().hashCode();
7177       }
7178       if (hasFamily()) {
7179         hash = (37 * hash) + FAMILY_FIELD_NUMBER;
7180         hash = (53 * hash) + getFamily().hashCode();
7181       }
7182       if (hasQualifier()) {
7183         hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
7184         hash = (53 * hash) + getQualifier().hashCode();
7185       }
7186       if (hasCompareType()) {
7187         hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER;
7188         hash = (53 * hash) + hashEnum(getCompareType());
7189       }
7190       if (hasComparator()) {
7191         hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
7192         hash = (53 * hash) + getComparator().hashCode();
7193       }
7194       hash = (29 * hash) + getUnknownFields().hashCode();
7195       memoizedHashCode = hash;
7196       return hash;
7197     }
7198 
7199     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7200         com.google.protobuf.ByteString data)
7201         throws com.google.protobuf.InvalidProtocolBufferException {
7202       return PARSER.parseFrom(data);
7203     }
7204     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7205         com.google.protobuf.ByteString data,
7206         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7207         throws com.google.protobuf.InvalidProtocolBufferException {
7208       return PARSER.parseFrom(data, extensionRegistry);
7209     }
7210     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data)
7211         throws com.google.protobuf.InvalidProtocolBufferException {
7212       return PARSER.parseFrom(data);
7213     }
7214     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7215         byte[] data,
7216         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7217         throws com.google.protobuf.InvalidProtocolBufferException {
7218       return PARSER.parseFrom(data, extensionRegistry);
7219     }
7220     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input)
7221         throws java.io.IOException {
7222       return PARSER.parseFrom(input);
7223     }
7224     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7225         java.io.InputStream input,
7226         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7227         throws java.io.IOException {
7228       return PARSER.parseFrom(input, extensionRegistry);
7229     }
7230     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input)
7231         throws java.io.IOException {
7232       return PARSER.parseDelimitedFrom(input);
7233     }
7234     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(
7235         java.io.InputStream input,
7236         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7237         throws java.io.IOException {
7238       return PARSER.parseDelimitedFrom(input, extensionRegistry);
7239     }
7240     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7241         com.google.protobuf.CodedInputStream input)
7242         throws java.io.IOException {
7243       return PARSER.parseFrom(input);
7244     }
7245     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7246         com.google.protobuf.CodedInputStream input,
7247         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7248         throws java.io.IOException {
7249       return PARSER.parseFrom(input, extensionRegistry);
7250     }
7251 
7252     public static Builder newBuilder() { return Builder.create(); }
7253     public Builder newBuilderForType() { return newBuilder(); }
7254     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) {
7255       return newBuilder().mergeFrom(prototype);
7256     }
7257     public Builder toBuilder() { return newBuilder(this); }
7258 
7259     @java.lang.Override
7260     protected Builder newBuilderForType(
7261         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7262       Builder builder = new Builder(parent);
7263       return builder;
7264     }
7265     /**
7266      * Protobuf type {@code hbase.pb.Condition}
7267      *
7268      * <pre>
7269      **
7270      * Condition to check if the value of a given cell (row,
7271      * family, qualifier) matches a value via a given comparator.
7272      *
7273      * Condition is used in check and mutate operations.
7274      * </pre>
7275      */
7276     public static final class Builder extends
7277         com.google.protobuf.GeneratedMessage.Builder<Builder>
7278        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder {
7279       public static final com.google.protobuf.Descriptors.Descriptor
7280           getDescriptor() {
7281         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
7282       }
7283 
7284       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7285           internalGetFieldAccessorTable() {
7286         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable
7287             .ensureFieldAccessorsInitialized(
7288                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
7289       }
7290 
7291       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder()
7292       private Builder() {
7293         maybeForceBuilderInitialization();
7294       }
7295 
7296       private Builder(
7297           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7298         super(parent);
7299         maybeForceBuilderInitialization();
7300       }
7301       private void maybeForceBuilderInitialization() {
7302         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7303           getComparatorFieldBuilder();
7304         }
7305       }
7306       private static Builder create() {
7307         return new Builder();
7308       }
7309 
7310       public Builder clear() {
7311         super.clear();
7312         row_ = com.google.protobuf.ByteString.EMPTY;
7313         bitField0_ = (bitField0_ & ~0x00000001);
7314         family_ = com.google.protobuf.ByteString.EMPTY;
7315         bitField0_ = (bitField0_ & ~0x00000002);
7316         qualifier_ = com.google.protobuf.ByteString.EMPTY;
7317         bitField0_ = (bitField0_ & ~0x00000004);
7318         compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
7319         bitField0_ = (bitField0_ & ~0x00000008);
7320         if (comparatorBuilder_ == null) {
7321           comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
7322         } else {
7323           comparatorBuilder_.clear();
7324         }
7325         bitField0_ = (bitField0_ & ~0x00000010);
7326         return this;
7327       }
7328 
7329       public Builder clone() {
7330         return create().mergeFrom(buildPartial());
7331       }
7332 
7333       public com.google.protobuf.Descriptors.Descriptor
7334           getDescriptorForType() {
7335         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
7336       }
7337 
7338       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() {
7339         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
7340       }
7341 
7342       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() {
7343         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial();
7344         if (!result.isInitialized()) {
7345           throw newUninitializedMessageException(result);
7346         }
7347         return result;
7348       }
7349 
7350       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() {
7351         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this);
7352         int from_bitField0_ = bitField0_;
7353         int to_bitField0_ = 0;
7354         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
7355           to_bitField0_ |= 0x00000001;
7356         }
7357         result.row_ = row_;
7358         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
7359           to_bitField0_ |= 0x00000002;
7360         }
7361         result.family_ = family_;
7362         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
7363           to_bitField0_ |= 0x00000004;
7364         }
7365         result.qualifier_ = qualifier_;
7366         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
7367           to_bitField0_ |= 0x00000008;
7368         }
7369         result.compareType_ = compareType_;
7370         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
7371           to_bitField0_ |= 0x00000010;
7372         }
7373         if (comparatorBuilder_ == null) {
7374           result.comparator_ = comparator_;
7375         } else {
7376           result.comparator_ = comparatorBuilder_.build();
7377         }
7378         result.bitField0_ = to_bitField0_;
7379         onBuilt();
7380         return result;
7381       }
7382 
7383       public Builder mergeFrom(com.google.protobuf.Message other) {
7384         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) {
7385           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other);
7386         } else {
7387           super.mergeFrom(other);
7388           return this;
7389         }
7390       }
7391 
7392       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) {
7393         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this;
7394         if (other.hasRow()) {
7395           setRow(other.getRow());
7396         }
7397         if (other.hasFamily()) {
7398           setFamily(other.getFamily());
7399         }
7400         if (other.hasQualifier()) {
7401           setQualifier(other.getQualifier());
7402         }
7403         if (other.hasCompareType()) {
7404           setCompareType(other.getCompareType());
7405         }
7406         if (other.hasComparator()) {
7407           mergeComparator(other.getComparator());
7408         }
7409         this.mergeUnknownFields(other.getUnknownFields());
7410         return this;
7411       }
7412 
7413       public final boolean isInitialized() {
7414         if (!hasRow()) {
7415           
7416           return false;
7417         }
7418         if (!hasFamily()) {
7419           
7420           return false;
7421         }
7422         if (!hasQualifier()) {
7423           
7424           return false;
7425         }
7426         if (!hasCompareType()) {
7427           
7428           return false;
7429         }
7430         if (!hasComparator()) {
7431           
7432           return false;
7433         }
7434         if (!getComparator().isInitialized()) {
7435           
7436           return false;
7437         }
7438         return true;
7439       }
7440 
7441       public Builder mergeFrom(
7442           com.google.protobuf.CodedInputStream input,
7443           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7444           throws java.io.IOException {
7445         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parsedMessage = null;
7446         try {
7447           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7448         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7449           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage();
7450           throw e;
7451         } finally {
7452           if (parsedMessage != null) {
7453             mergeFrom(parsedMessage);
7454           }
7455         }
7456         return this;
7457       }
7458       private int bitField0_;
7459 
7460       // required bytes row = 1;
7461       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
7462       /**
7463        * <code>required bytes row = 1;</code>
7464        */
7465       public boolean hasRow() {
7466         return ((bitField0_ & 0x00000001) == 0x00000001);
7467       }
7468       /**
7469        * <code>required bytes row = 1;</code>
7470        */
7471       public com.google.protobuf.ByteString getRow() {
7472         return row_;
7473       }
7474       /**
7475        * <code>required bytes row = 1;</code>
7476        */
7477       public Builder setRow(com.google.protobuf.ByteString value) {
7478         if (value == null) {
7479           throw new NullPointerException();
7480         }
7481         bitField0_ |= 0x00000001;
7482         row_ = value;
7483         onChanged();
7484         return this;
7485       }
7486       /**
7487        * <code>required bytes row = 1;</code>
7488        */
7489       public Builder clearRow() {
7490         bitField0_ = (bitField0_ & ~0x00000001);
7491         row_ = getDefaultInstance().getRow();
7492         onChanged();
7493         return this;
7494       }
7495 
7496       // required bytes family = 2;
7497       private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
7498       /**
7499        * <code>required bytes family = 2;</code>
7500        */
7501       public boolean hasFamily() {
7502         return ((bitField0_ & 0x00000002) == 0x00000002);
7503       }
7504       /**
7505        * <code>required bytes family = 2;</code>
7506        */
7507       public com.google.protobuf.ByteString getFamily() {
7508         return family_;
7509       }
7510       /**
7511        * <code>required bytes family = 2;</code>
7512        */
7513       public Builder setFamily(com.google.protobuf.ByteString value) {
7514         if (value == null) {
7515           throw new NullPointerException();
7516         }
7517         bitField0_ |= 0x00000002;
7518         family_ = value;
7519         onChanged();
7520         return this;
7521       }
7522       /**
7523        * <code>required bytes family = 2;</code>
7524        */
7525       public Builder clearFamily() {
7526         bitField0_ = (bitField0_ & ~0x00000002);
7527         family_ = getDefaultInstance().getFamily();
7528         onChanged();
7529         return this;
7530       }
7531 
7532       // required bytes qualifier = 3;
7533       private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
7534       /**
7535        * <code>required bytes qualifier = 3;</code>
7536        */
7537       public boolean hasQualifier() {
7538         return ((bitField0_ & 0x00000004) == 0x00000004);
7539       }
7540       /**
7541        * <code>required bytes qualifier = 3;</code>
7542        */
7543       public com.google.protobuf.ByteString getQualifier() {
7544         return qualifier_;
7545       }
7546       /**
7547        * <code>required bytes qualifier = 3;</code>
7548        */
7549       public Builder setQualifier(com.google.protobuf.ByteString value) {
7550         if (value == null) {
7551           throw new NullPointerException();
7552         }
7553         bitField0_ |= 0x00000004;
7554         qualifier_ = value;
7555         onChanged();
7556         return this;
7557       }
7558       /**
7559        * <code>required bytes qualifier = 3;</code>
7560        */
7561       public Builder clearQualifier() {
7562         bitField0_ = (bitField0_ & ~0x00000004);
7563         qualifier_ = getDefaultInstance().getQualifier();
7564         onChanged();
7565         return this;
7566       }
7567 
7568       // required .hbase.pb.CompareType compare_type = 4;
7569       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
7570       /**
7571        * <code>required .hbase.pb.CompareType compare_type = 4;</code>
7572        */
7573       public boolean hasCompareType() {
7574         return ((bitField0_ & 0x00000008) == 0x00000008);
7575       }
7576       /**
7577        * <code>required .hbase.pb.CompareType compare_type = 4;</code>
7578        */
7579       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
7580         return compareType_;
7581       }
7582       /**
7583        * <code>required .hbase.pb.CompareType compare_type = 4;</code>
7584        */
7585       public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
7586         if (value == null) {
7587           throw new NullPointerException();
7588         }
7589         bitField0_ |= 0x00000008;
7590         compareType_ = value;
7591         onChanged();
7592         return this;
7593       }
7594       /**
7595        * <code>required .hbase.pb.CompareType compare_type = 4;</code>
7596        */
7597       public Builder clearCompareType() {
7598         bitField0_ = (bitField0_ & ~0x00000008);
7599         compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
7600         onChanged();
7601         return this;
7602       }
7603 
7604       // required .hbase.pb.Comparator comparator = 5;
7605       private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
7606       private com.google.protobuf.SingleFieldBuilder<
7607           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
7608       /**
7609        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7610        */
7611       public boolean hasComparator() {
7612         return ((bitField0_ & 0x00000010) == 0x00000010);
7613       }
7614       /**
7615        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7616        */
7617       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
7618         if (comparatorBuilder_ == null) {
7619           return comparator_;
7620         } else {
7621           return comparatorBuilder_.getMessage();
7622         }
7623       }
7624       /**
7625        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7626        */
7627       public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
7628         if (comparatorBuilder_ == null) {
7629           if (value == null) {
7630             throw new NullPointerException();
7631           }
7632           comparator_ = value;
7633           onChanged();
7634         } else {
7635           comparatorBuilder_.setMessage(value);
7636         }
7637         bitField0_ |= 0x00000010;
7638         return this;
7639       }
7640       /**
7641        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7642        */
7643       public Builder setComparator(
7644           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
7645         if (comparatorBuilder_ == null) {
7646           comparator_ = builderForValue.build();
7647           onChanged();
7648         } else {
7649           comparatorBuilder_.setMessage(builderForValue.build());
7650         }
7651         bitField0_ |= 0x00000010;
7652         return this;
7653       }
7654       /**
7655        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7656        */
7657       public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
7658         if (comparatorBuilder_ == null) {
7659           if (((bitField0_ & 0x00000010) == 0x00000010) &&
7660               comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
7661             comparator_ =
7662               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
7663           } else {
7664             comparator_ = value;
7665           }
7666           onChanged();
7667         } else {
7668           comparatorBuilder_.mergeFrom(value);
7669         }
7670         bitField0_ |= 0x00000010;
7671         return this;
7672       }
7673       /**
7674        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7675        */
7676       public Builder clearComparator() {
7677         if (comparatorBuilder_ == null) {
7678           comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
7679           onChanged();
7680         } else {
7681           comparatorBuilder_.clear();
7682         }
7683         bitField0_ = (bitField0_ & ~0x00000010);
7684         return this;
7685       }
7686       /**
7687        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7688        */
7689       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
7690         bitField0_ |= 0x00000010;
7691         onChanged();
7692         return getComparatorFieldBuilder().getBuilder();
7693       }
7694       /**
7695        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7696        */
7697       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
7698         if (comparatorBuilder_ != null) {
7699           return comparatorBuilder_.getMessageOrBuilder();
7700         } else {
7701           return comparator_;
7702         }
7703       }
7704       /**
7705        * <code>required .hbase.pb.Comparator comparator = 5;</code>
7706        */
7707       private com.google.protobuf.SingleFieldBuilder<
7708           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> 
7709           getComparatorFieldBuilder() {
7710         if (comparatorBuilder_ == null) {
7711           comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
7712               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
7713                   comparator_,
7714                   getParentForChildren(),
7715                   isClean());
7716           comparator_ = null;
7717         }
7718         return comparatorBuilder_;
7719       }
7720 
7721       // @@protoc_insertion_point(builder_scope:hbase.pb.Condition)
7722     }
7723 
7724     static {
7725       defaultInstance = new Condition(true);
7726       defaultInstance.initFields();
7727     }
7728 
7729     // @@protoc_insertion_point(class_scope:hbase.pb.Condition)
7730   }
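
  /*
   * Editor's sketch (illustrative, not part of the generated file): a minimal use of the
   * Condition builder shown above, limited to methods visible in this class
   * (newBuilder, setRow/setFamily/setQualifier, setCompareType, setComparator,
   * buildPartial, hasRow, getCompareType).  The row/family/qualifier strings are
   * made up, and buildPartial() is used instead of build() because the placeholder
   * Comparator.getDefaultInstance() may not satisfy the Comparator's own required
   * fields; with a fully populated Comparator, build() and a
   * Condition.parseFrom(cond.toByteArray()) round trip would also work.
   *
   *   ClientProtos.Condition cond = ClientProtos.Condition.newBuilder()
   *       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
   *       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
   *       .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
   *       .setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS)
   *       .setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance())
   *       .buildPartial();
   *   assert cond.hasRow();
   *   assert cond.getCompareType() == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
   */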
7731 
7732   public interface MutationProtoOrBuilder
7733       extends com.google.protobuf.MessageOrBuilder {
7734 
7735     // optional bytes row = 1;
7736     /**
7737      * <code>optional bytes row = 1;</code>
7738      */
7739     boolean hasRow();
7740     /**
7741      * <code>optional bytes row = 1;</code>
7742      */
7743     com.google.protobuf.ByteString getRow();
7744 
7745     // optional .hbase.pb.MutationProto.MutationType mutate_type = 2;
7746     /**
7747      * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
7748      */
7749     boolean hasMutateType();
7750     /**
7751      * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
7752      */
7753     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType();
7754 
7755     // repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;
7756     /**
7757      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
7758      */
7759     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> 
7760         getColumnValueList();
7761     /**
7762      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
7763      */
7764     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index);
7765     /**
7766      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
7767      */
7768     int getColumnValueCount();
7769     /**
7770      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
7771      */
7772     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
7773         getColumnValueOrBuilderList();
7774     /**
7775      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
7776      */
7777     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
7778         int index);
7779 
7780     // optional uint64 timestamp = 4;
7781     /**
7782      * <code>optional uint64 timestamp = 4;</code>
7783      */
7784     boolean hasTimestamp();
7785     /**
7786      * <code>optional uint64 timestamp = 4;</code>
7787      */
7788     long getTimestamp();
7789 
7790     // repeated .hbase.pb.NameBytesPair attribute = 5;
7791     /**
7792      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
7793      */
7794     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> 
7795         getAttributeList();
7796     /**
7797      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
7798      */
7799     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
7800     /**
7801      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
7802      */
7803     int getAttributeCount();
7804     /**
7805      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
7806      */
7807     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
7808         getAttributeOrBuilderList();
7809     /**
7810      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
7811      */
7812     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
7813         int index);
7814 
7815     // optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];
7816     /**
7817      * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
7818      */
7819     boolean hasDurability();
7820     /**
7821      * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
7822      */
7823     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability();
7824 
7825     // optional .hbase.pb.TimeRange time_range = 7;
7826     /**
7827      * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
7828      *
7829      * <pre>
7830      * For some mutations, a result may be returned, in which case a
7831      * time range can be specified for a potential performance gain
7832      * </pre>
7833      */
7834     boolean hasTimeRange();
7835     /**
7836      * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
7837      *
7838      * <pre>
7839      * For some mutations, a result may be returned, in which case a
7840      * time range can be specified for a potential performance gain
7841      * </pre>
7842      */
7843     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
7844     /**
7845      * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
7846      *
7847      * <pre>
7848      * For some mutations, a result may be returned, in which case a
7849      * time range can be specified for a potential performance gain
7850      * </pre>
7851      */
7852     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
7853 
7854     // optional int32 associated_cell_count = 8;
7855     /**
7856      * <code>optional int32 associated_cell_count = 8;</code>
7857      *
7858      * <pre>
7859      * This count is set when the associated cells are NOT
7860      * part of this protobuf message; they are passed alongside,
7861      * and this Message is then a placeholder carrying metadata.  The
7862      * count says how many cells to peel off the accompanying block of
7863      * Cells as ours.  NOTE: this is different from the pb-managed cell_count
7864      * of the 'cell' field above, which is non-null when the cells are pb'd.
7865      * </pre>
7866      */
7867     boolean hasAssociatedCellCount();
7868     /**
7869      * <code>optional int32 associated_cell_count = 8;</code>
7870      *
7871      * <pre>
7872      * This count is set when the associated cells are NOT
7873      * part of this protobuf message; they are passed alongside,
7874      * and this Message is then a placeholder carrying metadata.  The
7875      * count says how many cells to peel off the accompanying block of
7876      * Cells as ours.  NOTE: this is different from the pb-managed cell_count
7877      * of the 'cell' field above, which is non-null when the cells are pb'd.
7878      * </pre>
7879      */
7880     int getAssociatedCellCount();
7881 
7882     // optional uint64 nonce = 9;
7883     /**
7884      * <code>optional uint64 nonce = 9;</code>
7885      */
7886     boolean hasNonce();
7887     /**
7888      * <code>optional uint64 nonce = 9;</code>
7889      */
7890     long getNonce();
7891   }
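
  /*
   * Editor's sketch (illustrative, not part of the generated file): unlike Condition,
   * whose fields are all declared required, the fields declared in
   * MutationProtoOrBuilder above are optional or repeated, so the initialization
   * checks differ.  An empty Condition builder fails isInitialized() because row,
   * family, qualifier, compare_type and comparator are unset.
   *
   *   boolean ready = ClientProtos.Condition.newBuilder().isInitialized();  // false
   */
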
7892   /**
7893    * Protobuf type {@code hbase.pb.MutationProto}
7894    *
7895    * <pre>
7896    **
7897    * A specific mutation inside a mutate request.
7898    * It can be an append, increment, put or delete based
7899    * on the mutation type.  It can be fully filled in, or only its
7900    * metadata may be present because the data is being carried
7901    * elsewhere, outside of pb.
7902    * </pre>
7903    */
7904   public static final class MutationProto extends
7905       com.google.protobuf.GeneratedMessage
7906       implements MutationProtoOrBuilder {
7907     // Use MutationProto.newBuilder() to construct.
7908     private MutationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7909       super(builder);
7910       this.unknownFields = builder.getUnknownFields();
7911     }
7912     private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7913 
7914     private static final MutationProto defaultInstance;
7915     public static MutationProto getDefaultInstance() {
7916       return defaultInstance;
7917     }
7918 
7919     public MutationProto getDefaultInstanceForType() {
7920       return defaultInstance;
7921     }
7922 
7923     private final com.google.protobuf.UnknownFieldSet unknownFields;
7924     @java.lang.Override
7925     public final com.google.protobuf.UnknownFieldSet
7926         getUnknownFields() {
7927       return this.unknownFields;
7928     }
7929     private MutationProto(
7930         com.google.protobuf.CodedInputStream input,
7931         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7932         throws com.google.protobuf.InvalidProtocolBufferException {
7933       initFields();
7934       int mutable_bitField0_ = 0;
7935       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7936           com.google.protobuf.UnknownFieldSet.newBuilder();
7937       try {
7938         boolean done = false;
7939         while (!done) {
7940           int tag = input.readTag();
7941           switch (tag) {
7942             case 0:
7943               done = true;
7944               break;
7945             default: {
7946               if (!parseUnknownField(input, unknownFields,
7947                                      extensionRegistry, tag)) {
7948                 done = true;
7949               }
7950               break;
7951             }
7952             case 10: {
7953               bitField0_ |= 0x00000001;
7954               row_ = input.readBytes();
7955               break;
7956             }
7957             case 16: {
7958               int rawValue = input.readEnum();
7959               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue);
7960               if (value == null) {
7961                 unknownFields.mergeVarintField(2, rawValue);
7962               } else {
7963                 bitField0_ |= 0x00000002;
7964                 mutateType_ = value;
7965               }
7966               break;
7967             }
7968             case 26: {
7969               if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
7970                 columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>();
7971                 mutable_bitField0_ |= 0x00000004;
7972               }
7973               columnValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry));
7974               break;
7975             }
7976             case 32: {
7977               bitField0_ |= 0x00000004;
7978               timestamp_ = input.readUInt64();
7979               break;
7980             }
7981             case 42: {
7982               if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
7983                 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
7984                 mutable_bitField0_ |= 0x00000010;
7985               }
7986               attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
7987               break;
7988             }
7989             case 48: {
7990               int rawValue = input.readEnum();
7991               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(rawValue);
7992               if (value == null) {
7993                 unknownFields.mergeVarintField(6, rawValue);
7994               } else {
7995                 bitField0_ |= 0x00000008;
7996                 durability_ = value;
7997               }
7998               break;
7999             }
8000             case 58: {
8001               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
8002               if (((bitField0_ & 0x00000010) == 0x00000010)) {
8003                 subBuilder = timeRange_.toBuilder();
8004               }
8005               timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
8006               if (subBuilder != null) {
8007                 subBuilder.mergeFrom(timeRange_);
8008                 timeRange_ = subBuilder.buildPartial();
8009               }
8010               bitField0_ |= 0x00000010;
8011               break;
8012             }
8013             case 64: {
8014               bitField0_ |= 0x00000020;
8015               associatedCellCount_ = input.readInt32();
8016               break;
8017             }
8018             case 72: {
8019               bitField0_ |= 0x00000040;
8020               nonce_ = input.readUInt64();
8021               break;
8022             }
8023           }
8024         }
8025       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8026         throw e.setUnfinishedMessage(this);
8027       } catch (java.io.IOException e) {
8028         throw new com.google.protobuf.InvalidProtocolBufferException(
8029             e.getMessage()).setUnfinishedMessage(this);
8030       } finally {
8031         if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
8032           columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
8033         }
8034         if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
8035           attribute_ = java.util.Collections.unmodifiableList(attribute_);
8036         }
8037         this.unknownFields = unknownFields.build();
8038         makeExtensionsImmutable();
8039       }
8040     }
8041     public static final com.google.protobuf.Descriptors.Descriptor
8042         getDescriptor() {
8043       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor;
8044     }
8045 
8046     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8047         internalGetFieldAccessorTable() {
8048       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable
8049           .ensureFieldAccessorsInitialized(
8050               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
8051     }
8052 
8053     public static com.google.protobuf.Parser<MutationProto> PARSER =
8054         new com.google.protobuf.AbstractParser<MutationProto>() {
8055       public MutationProto parsePartialFrom(
8056           com.google.protobuf.CodedInputStream input,
8057           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8058           throws com.google.protobuf.InvalidProtocolBufferException {
8059         return new MutationProto(input, extensionRegistry);
8060       }
8061     };
8062 
8063     @java.lang.Override
8064     public com.google.protobuf.Parser<MutationProto> getParserForType() {
8065       return PARSER;
8066     }
8067 
8068     /**
8069      * Protobuf enum {@code hbase.pb.MutationProto.Durability}
8070      */
8071     public enum Durability
8072         implements com.google.protobuf.ProtocolMessageEnum {
8073       /**
8074        * <code>USE_DEFAULT = 0;</code>
8075        */
8076       USE_DEFAULT(0, 0),
8077       /**
8078        * <code>SKIP_WAL = 1;</code>
8079        */
8080       SKIP_WAL(1, 1),
8081       /**
8082        * <code>ASYNC_WAL = 2;</code>
8083        */
8084       ASYNC_WAL(2, 2),
8085       /**
8086        * <code>SYNC_WAL = 3;</code>
8087        */
8088       SYNC_WAL(3, 3),
8089       /**
8090        * <code>FSYNC_WAL = 4;</code>
8091        */
8092       FSYNC_WAL(4, 4),
8093       ;
8094 
8095       /**
8096        * <code>USE_DEFAULT = 0;</code>
8097        */
8098       public static final int USE_DEFAULT_VALUE = 0;
8099       /**
8100        * <code>SKIP_WAL = 1;</code>
8101        */
8102       public static final int SKIP_WAL_VALUE = 1;
8103       /**
8104        * <code>ASYNC_WAL = 2;</code>
8105        */
8106       public static final int ASYNC_WAL_VALUE = 2;
8107       /**
8108        * <code>SYNC_WAL = 3;</code>
8109        */
8110       public static final int SYNC_WAL_VALUE = 3;
8111       /**
8112        * <code>FSYNC_WAL = 4;</code>
8113        */
8114       public static final int FSYNC_WAL_VALUE = 4;
8115 
8116 
8117       public final int getNumber() { return value; }
8118 
8119       public static Durability valueOf(int value) {
8120         switch (value) {
8121           case 0: return USE_DEFAULT;
8122           case 1: return SKIP_WAL;
8123           case 2: return ASYNC_WAL;
8124           case 3: return SYNC_WAL;
8125           case 4: return FSYNC_WAL;
8126           default: return null;
8127         }
8128       }
8129 
8130       public static com.google.protobuf.Internal.EnumLiteMap<Durability>
8131           internalGetValueMap() {
8132         return internalValueMap;
8133       }
8134       private static com.google.protobuf.Internal.EnumLiteMap<Durability>
8135           internalValueMap =
8136             new com.google.protobuf.Internal.EnumLiteMap<Durability>() {
8137               public Durability findValueByNumber(int number) {
8138                 return Durability.valueOf(number);
8139               }
8140             };
8141 
8142       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8143           getValueDescriptor() {
8144         return getDescriptor().getValues().get(index);
8145       }
8146       public final com.google.protobuf.Descriptors.EnumDescriptor
8147           getDescriptorForType() {
8148         return getDescriptor();
8149       }
8150       public static final com.google.protobuf.Descriptors.EnumDescriptor
8151           getDescriptor() {
8152         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0);
8153       }
8154 
8155       private static final Durability[] VALUES = values();
8156 
8157       public static Durability valueOf(
8158           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8159         if (desc.getType() != getDescriptor()) {
8160           throw new java.lang.IllegalArgumentException(
8161             "EnumValueDescriptor is not for this type.");
8162         }
8163         return VALUES[desc.getIndex()];
8164       }
8165 
8166       private final int index;
8167       private final int value;
8168 
8169       private Durability(int index, int value) {
8170         this.index = index;
8171         this.value = value;
8172       }
8173 
8174       // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.Durability)
8175     }
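
    /*
     * Editor's sketch (illustrative, not part of the generated file): the enum helpers
     * shown above convert between wire numbers and constants.  valueOf(int) returns
     * null for a number this client does not recognize, which is why the parsing
     * constructor earlier routes such values into the unknown-field set via
     * mergeVarintField instead of failing.
     *
     *   MutationProto.Durability d = MutationProto.Durability.valueOf(2);   // ASYNC_WAL
     *   int n = MutationProto.Durability.SYNC_WAL.getNumber();              // 3
     *   MutationProto.Durability u = MutationProto.Durability.valueOf(99);  // null (unknown)
     */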
8176 
8177     /**
8178      * Protobuf enum {@code hbase.pb.MutationProto.MutationType}
8179      */
8180     public enum MutationType
8181         implements com.google.protobuf.ProtocolMessageEnum {
8182       /**
8183        * <code>APPEND = 0;</code>
8184        */
8185       APPEND(0, 0),
8186       /**
8187        * <code>INCREMENT = 1;</code>
8188        */
8189       INCREMENT(1, 1),
8190       /**
8191        * <code>PUT = 2;</code>
8192        */
8193       PUT(2, 2),
8194       /**
8195        * <code>DELETE = 3;</code>
8196        */
8197       DELETE(3, 3),
8198       ;
8199 
8200       /**
8201        * <code>APPEND = 0;</code>
8202        */
8203       public static final int APPEND_VALUE = 0;
8204       /**
8205        * <code>INCREMENT = 1;</code>
8206        */
8207       public static final int INCREMENT_VALUE = 1;
8208       /**
8209        * <code>PUT = 2;</code>
8210        */
8211       public static final int PUT_VALUE = 2;
8212       /**
8213        * <code>DELETE = 3;</code>
8214        */
8215       public static final int DELETE_VALUE = 3;
8216 
8217 
8218       public final int getNumber() { return value; }
8219 
8220       public static MutationType valueOf(int value) {
8221         switch (value) {
8222           case 0: return APPEND;
8223           case 1: return INCREMENT;
8224           case 2: return PUT;
8225           case 3: return DELETE;
8226           default: return null;
8227         }
8228       }
8229 
8230       public static com.google.protobuf.Internal.EnumLiteMap<MutationType>
8231           internalGetValueMap() {
8232         return internalValueMap;
8233       }
8234       private static com.google.protobuf.Internal.EnumLiteMap<MutationType>
8235           internalValueMap =
8236             new com.google.protobuf.Internal.EnumLiteMap<MutationType>() {
8237               public MutationType findValueByNumber(int number) {
8238                 return MutationType.valueOf(number);
8239               }
8240             };
8241 
8242       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8243           getValueDescriptor() {
8244         return getDescriptor().getValues().get(index);
8245       }
8246       public final com.google.protobuf.Descriptors.EnumDescriptor
8247           getDescriptorForType() {
8248         return getDescriptor();
8249       }
8250       public static final com.google.protobuf.Descriptors.EnumDescriptor
8251           getDescriptor() {
8252         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1);
8253       }
8254 
8255       private static final MutationType[] VALUES = values();
8256 
8257       public static MutationType valueOf(
8258           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8259         if (desc.getType() != getDescriptor()) {
8260           throw new java.lang.IllegalArgumentException(
8261             "EnumValueDescriptor is not for this type.");
8262         }
8263         return VALUES[desc.getIndex()];
8264       }
8265 
8266       private final int index;
8267       private final int value;
8268 
8269       private MutationType(int index, int value) {
8270         this.index = index;
8271         this.value = value;
8272       }
8273 
8274       // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.MutationType)
8275     }
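
    /*
     * Editor's sketch (illustrative, not part of the generated file): parsing a serialized
     * MutationProto with the PARSER declared above and reading it back through the
     * MutationProtoOrBuilder accessors.  The wireBytes variable and
     * obtainSerializedMutation() are hypothetical; wireBytes is assumed to hold a
     * serialized hbase.pb.MutationProto.
     *
     *   byte[] wireBytes = obtainSerializedMutation();  // hypothetical source of bytes
     *   // may throw InvalidProtocolBufferException on malformed input
     *   ClientProtos.MutationProto m = ClientProtos.MutationProto.PARSER.parseFrom(wireBytes);
     *   boolean isPut = m.hasMutateType()
     *       && m.getMutateType() == ClientProtos.MutationProto.MutationType.PUT;
     *   long ts = m.hasTimestamp() ? m.getTimestamp() : 0L;
     */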
8276 
8277     /**
8278      * Protobuf enum {@code hbase.pb.MutationProto.DeleteType}
8279      */
8280     public enum DeleteType
8281         implements com.google.protobuf.ProtocolMessageEnum {
8282       /**
8283        * <code>DELETE_ONE_VERSION = 0;</code>
8284        */
8285       DELETE_ONE_VERSION(0, 0),
8286       /**
8287        * <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
8288        */
8289       DELETE_MULTIPLE_VERSIONS(1, 1),
8290       /**
8291        * <code>DELETE_FAMILY = 2;</code>
8292        */
8293       DELETE_FAMILY(2, 2),
8294       /**
8295        * <code>DELETE_FAMILY_VERSION = 3;</code>
8296        */
8297       DELETE_FAMILY_VERSION(3, 3),
8298       ;
8299 
8300       /**
8301        * <code>DELETE_ONE_VERSION = 0;</code>
8302        */
8303       public static final int DELETE_ONE_VERSION_VALUE = 0;
8304       /**
8305        * <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
8306        */
8307       public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1;
8308       /**
8309        * <code>DELETE_FAMILY = 2;</code>
8310        */
8311       public static final int DELETE_FAMILY_VALUE = 2;
8312       /**
8313        * <code>DELETE_FAMILY_VERSION = 3;</code>
8314        */
8315       public static final int DELETE_FAMILY_VERSION_VALUE = 3;
8316 
8317 
8318       public final int getNumber() { return value; }
8319 
8320       public static DeleteType valueOf(int value) {
8321         switch (value) {
8322           case 0: return DELETE_ONE_VERSION;
8323           case 1: return DELETE_MULTIPLE_VERSIONS;
8324           case 2: return DELETE_FAMILY;
8325           case 3: return DELETE_FAMILY_VERSION;
8326           default: return null;
8327         }
8328       }
8329 
8330       public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
8331           internalGetValueMap() {
8332         return internalValueMap;
8333       }
8334       private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
8335           internalValueMap =
8336             new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
8337               public DeleteType findValueByNumber(int number) {
8338                 return DeleteType.valueOf(number);
8339               }
8340             };
8341 
8342       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8343           getValueDescriptor() {
8344         return getDescriptor().getValues().get(index);
8345       }
8346       public final com.google.protobuf.Descriptors.EnumDescriptor
8347           getDescriptorForType() {
8348         return getDescriptor();
8349       }
8350       public static final com.google.protobuf.Descriptors.EnumDescriptor
8351           getDescriptor() {
8352         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(2);
8353       }
8354 
8355       private static final DeleteType[] VALUES = values();
8356 
8357       public static DeleteType valueOf(
8358           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8359         if (desc.getType() != getDescriptor()) {
8360           throw new java.lang.IllegalArgumentException(
8361             "EnumValueDescriptor is not for this type.");
8362         }
8363         return VALUES[desc.getIndex()];
8364       }
8365 
8366       private final int index;
8367       private final int value;
8368 
8369       private DeleteType(int index, int value) {
8370         this.index = index;
8371         this.value = value;
8372       }
8373 
8374       // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.DeleteType)
8375     }
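    /*
     * Editorial note (not generated code): the descriptor-based valueOf accepts
     * only EnumValueDescriptors belonging to this enum and throws
     * IllegalArgumentException for any other type. Round-trip sketch:
     *
     *   com.google.protobuf.Descriptors.EnumValueDescriptor d =
     *       MutationProto.DeleteType.DELETE_FAMILY.getValueDescriptor();
     *   MutationProto.DeleteType dt = MutationProto.DeleteType.valueOf(d); // DELETE_FAMILY
     */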
8376 
8377     public interface ColumnValueOrBuilder
8378         extends com.google.protobuf.MessageOrBuilder {
8379 
8380       // required bytes family = 1;
8381       /**
8382        * <code>required bytes family = 1;</code>
8383        */
8384       boolean hasFamily();
8385       /**
8386        * <code>required bytes family = 1;</code>
8387        */
8388       com.google.protobuf.ByteString getFamily();
8389 
8390       // repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
8391       /**
8392        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8393        */
8394       java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> 
8395           getQualifierValueList();
8396       /**
8397        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8398        */
8399       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index);
8400       /**
8401        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8402        */
8403       int getQualifierValueCount();
8404       /**
8405        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8406        */
8407       java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
8408           getQualifierValueOrBuilderList();
8409       /**
8410        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8411        */
8412       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
8413           int index);
8414     }
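    /*
     * Editorial usage sketch (not generated code): reading a ColumnValue through
     * this interface. The variable name cv is illustrative; a real instance
     * would come from a MutationProto.
     *
     *   MutationProto.ColumnValueOrBuilder cv = ...;
     *   com.google.protobuf.ByteString family = cv.getFamily();
     *   for (int i = 0; i < cv.getQualifierValueCount(); i++) {
     *     MutationProto.ColumnValue.QualifierValue qv = cv.getQualifierValue(i);
     *     // qv.getQualifier(), qv.getValue(), qv.getTimestamp(), ...
     *   }
     */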
8415     /**
8416      * Protobuf type {@code hbase.pb.MutationProto.ColumnValue}
8417      */
8418     public static final class ColumnValue extends
8419         com.google.protobuf.GeneratedMessage
8420         implements ColumnValueOrBuilder {
8421       // Use ColumnValue.newBuilder() to construct.
8422       private ColumnValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8423         super(builder);
8424         this.unknownFields = builder.getUnknownFields();
8425       }
8426       private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8427 
8428       private static final ColumnValue defaultInstance;
8429       public static ColumnValue getDefaultInstance() {
8430         return defaultInstance;
8431       }
8432 
8433       public ColumnValue getDefaultInstanceForType() {
8434         return defaultInstance;
8435       }
8436 
8437       private final com.google.protobuf.UnknownFieldSet unknownFields;
8438       @java.lang.Override
8439       public final com.google.protobuf.UnknownFieldSet
8440           getUnknownFields() {
8441         return this.unknownFields;
8442       }
8443       private ColumnValue(
8444           com.google.protobuf.CodedInputStream input,
8445           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8446           throws com.google.protobuf.InvalidProtocolBufferException {
8447         initFields();
8448         int mutable_bitField0_ = 0;
8449         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8450             com.google.protobuf.UnknownFieldSet.newBuilder();
8451         try {
8452           boolean done = false;
8453           while (!done) {
8454             int tag = input.readTag();
8455             switch (tag) {
8456               case 0:
8457                 done = true;
8458                 break;
8459               default: {
8460                 if (!parseUnknownField(input, unknownFields,
8461                                        extensionRegistry, tag)) {
8462                   done = true;
8463                 }
8464                 break;
8465               }
8466               case 10: {
8467                 bitField0_ |= 0x00000001;
8468                 family_ = input.readBytes();
8469                 break;
8470               }
8471               case 18: {
8472                 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
8473                   qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>();
8474                   mutable_bitField0_ |= 0x00000002;
8475                 }
8476                 qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry));
8477                 break;
8478               }
8479             }
8480           }
8481         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8482           throw e.setUnfinishedMessage(this);
8483         } catch (java.io.IOException e) {
8484           throw new com.google.protobuf.InvalidProtocolBufferException(
8485               e.getMessage()).setUnfinishedMessage(this);
8486         } finally {
8487           if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
8488             qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
8489           }
8490           this.unknownFields = unknownFields.build();
8491           makeExtensionsImmutable();
8492         }
8493       }
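      /*
       * Editorial note (not generated code): the case labels in the parsing
       * loop above are raw protobuf wire tags, (field_number << 3) | wire_type.
       * Field 1 (family, length-delimited wire type 2) yields (1 << 3) | 2 = 10,
       * and field 2 (qualifier_value) yields (2 << 3) | 2 = 18, which is why the
       * switch matches 10 and 18 rather than the field numbers 1 and 2.
       */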
8494       public static final com.google.protobuf.Descriptors.Descriptor
8495           getDescriptor() {
8496         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
8497       }
8498 
8499       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8500           internalGetFieldAccessorTable() {
8501         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable
8502             .ensureFieldAccessorsInitialized(
8503                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
8504       }
8505 
8506       public static com.google.protobuf.Parser<ColumnValue> PARSER =
8507           new com.google.protobuf.AbstractParser<ColumnValue>() {
8508         public ColumnValue parsePartialFrom(
8509             com.google.protobuf.CodedInputStream input,
8510             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8511             throws com.google.protobuf.InvalidProtocolBufferException {
8512           return new ColumnValue(input, extensionRegistry);
8513         }
8514       };
8515 
8516       @java.lang.Override
8517       public com.google.protobuf.Parser<ColumnValue> getParserForType() {
8518         return PARSER;
8519       }
8520 
8521       public interface QualifierValueOrBuilder
8522           extends com.google.protobuf.MessageOrBuilder {
8523 
8524         // optional bytes qualifier = 1;
8525         /**
8526          * <code>optional bytes qualifier = 1;</code>
8527          */
8528         boolean hasQualifier();
8529         /**
8530          * <code>optional bytes qualifier = 1;</code>
8531          */
8532         com.google.protobuf.ByteString getQualifier();
8533 
8534         // optional bytes value = 2;
8535         /**
8536          * <code>optional bytes value = 2;</code>
8537          */
8538         boolean hasValue();
8539         /**
8540          * <code>optional bytes value = 2;</code>
8541          */
8542         com.google.protobuf.ByteString getValue();
8543 
8544         // optional uint64 timestamp = 3;
8545         /**
8546          * <code>optional uint64 timestamp = 3;</code>
8547          */
8548         boolean hasTimestamp();
8549         /**
8550          * <code>optional uint64 timestamp = 3;</code>
8551          */
8552         long getTimestamp();
8553 
8554         // optional .hbase.pb.MutationProto.DeleteType delete_type = 4;
8555         /**
8556          * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
8557          */
8558         boolean hasDeleteType();
8559         /**
8560          * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
8561          */
8562         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType();
8563 
8564         // optional bytes tags = 5;
8565         /**
8566          * <code>optional bytes tags = 5;</code>
8567          */
8568         boolean hasTags();
8569         /**
8570          * <code>optional bytes tags = 5;</code>
8571          */
8572         com.google.protobuf.ByteString getTags();
8573       }
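      /*
       * Editorial note (not generated code): each optional field pairs a hasX()
       * presence check with a getX() accessor that falls back to the proto
       * default when the field is unset (empty ByteString, 0L for timestamp,
       * DELETE_ONE_VERSION for delete_type). Presence should be checked rather
       * than inferred from the default value:
       *
       *   if (qv.hasTimestamp()) {
       *     long ts = qv.getTimestamp(); // only meaningful when hasTimestamp()
       *   }
       */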
8574       /**
8575        * Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue}
8576        */
8577       public static final class QualifierValue extends
8578           com.google.protobuf.GeneratedMessage
8579           implements QualifierValueOrBuilder {
8580         // Use QualifierValue.newBuilder() to construct.
8581         private QualifierValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8582           super(builder);
8583           this.unknownFields = builder.getUnknownFields();
8584         }
8585         private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8586 
8587         private static final QualifierValue defaultInstance;
8588         public static QualifierValue getDefaultInstance() {
8589           return defaultInstance;
8590         }
8591 
8592         public QualifierValue getDefaultInstanceForType() {
8593           return defaultInstance;
8594         }
8595 
8596         private final com.google.protobuf.UnknownFieldSet unknownFields;
8597         @java.lang.Override
8598         public final com.google.protobuf.UnknownFieldSet
8599             getUnknownFields() {
8600           return this.unknownFields;
8601         }
8602         private QualifierValue(
8603             com.google.protobuf.CodedInputStream input,
8604             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8605             throws com.google.protobuf.InvalidProtocolBufferException {
8606           initFields();
8607           int mutable_bitField0_ = 0;
8608           com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8609               com.google.protobuf.UnknownFieldSet.newBuilder();
8610           try {
8611             boolean done = false;
8612             while (!done) {
8613               int tag = input.readTag();
8614               switch (tag) {
8615                 case 0:
8616                   done = true;
8617                   break;
8618                 default: {
8619                   if (!parseUnknownField(input, unknownFields,
8620                                          extensionRegistry, tag)) {
8621                     done = true;
8622                   }
8623                   break;
8624                 }
8625                 case 10: {
8626                   bitField0_ |= 0x00000001;
8627                   qualifier_ = input.readBytes();
8628                   break;
8629                 }
8630                 case 18: {
8631                   bitField0_ |= 0x00000002;
8632                   value_ = input.readBytes();
8633                   break;
8634                 }
8635                 case 24: {
8636                   bitField0_ |= 0x00000004;
8637                   timestamp_ = input.readUInt64();
8638                   break;
8639                 }
8640                 case 32: {
8641                   int rawValue = input.readEnum();
8642                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue);
8643                   if (value == null) {
8644                     unknownFields.mergeVarintField(4, rawValue);
8645                   } else {
8646                     bitField0_ |= 0x00000008;
8647                     deleteType_ = value;
8648                   }
8649                   break;
8650                 }
8651                 case 42: {
8652                   bitField0_ |= 0x00000010;
8653                   tags_ = input.readBytes();
8654                   break;
8655                 }
8656               }
8657             }
8658           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8659             throw e.setUnfinishedMessage(this);
8660           } catch (java.io.IOException e) {
8661             throw new com.google.protobuf.InvalidProtocolBufferException(
8662                 e.getMessage()).setUnfinishedMessage(this);
8663           } finally {
8664             this.unknownFields = unknownFields.build();
8665             makeExtensionsImmutable();
8666           }
8667         }
8668         public static final com.google.protobuf.Descriptors.Descriptor
8669             getDescriptor() {
8670           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
8671         }
8672 
8673         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8674             internalGetFieldAccessorTable() {
8675           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
8676               .ensureFieldAccessorsInitialized(
8677                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
8678         }
8679 
8680         public static com.google.protobuf.Parser<QualifierValue> PARSER =
8681             new com.google.protobuf.AbstractParser<QualifierValue>() {
8682           public QualifierValue parsePartialFrom(
8683               com.google.protobuf.CodedInputStream input,
8684               com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8685               throws com.google.protobuf.InvalidProtocolBufferException {
8686             return new QualifierValue(input, extensionRegistry);
8687           }
8688         };
8689 
8690         @java.lang.Override
8691         public com.google.protobuf.Parser<QualifierValue> getParserForType() {
8692           return PARSER;
8693         }
8694 
8695         private int bitField0_;
8696         // optional bytes qualifier = 1;
8697         public static final int QUALIFIER_FIELD_NUMBER = 1;
8698         private com.google.protobuf.ByteString qualifier_;
8699         /**
8700          * <code>optional bytes qualifier = 1;</code>
8701          */
8702         public boolean hasQualifier() {
8703           return ((bitField0_ & 0x00000001) == 0x00000001);
8704         }
8705         /**
8706          * <code>optional bytes qualifier = 1;</code>
8707          */
8708         public com.google.protobuf.ByteString getQualifier() {
8709           return qualifier_;
8710         }
8711 
8712         // optional bytes value = 2;
8713         public static final int VALUE_FIELD_NUMBER = 2;
8714         private com.google.protobuf.ByteString value_;
8715         /**
8716          * <code>optional bytes value = 2;</code>
8717          */
8718         public boolean hasValue() {
8719           return ((bitField0_ & 0x00000002) == 0x00000002);
8720         }
8721         /**
8722          * <code>optional bytes value = 2;</code>
8723          */
8724         public com.google.protobuf.ByteString getValue() {
8725           return value_;
8726         }
8727 
8728         // optional uint64 timestamp = 3;
8729         public static final int TIMESTAMP_FIELD_NUMBER = 3;
8730         private long timestamp_;
8731         /**
8732          * <code>optional uint64 timestamp = 3;</code>
8733          */
8734         public boolean hasTimestamp() {
8735           return ((bitField0_ & 0x00000004) == 0x00000004);
8736         }
8737         /**
8738          * <code>optional uint64 timestamp = 3;</code>
8739          */
8740         public long getTimestamp() {
8741           return timestamp_;
8742         }
8743 
8744         // optional .hbase.pb.MutationProto.DeleteType delete_type = 4;
8745         public static final int DELETE_TYPE_FIELD_NUMBER = 4;
8746         private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_;
8747         /**
8748          * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
8749          */
8750         public boolean hasDeleteType() {
8751           return ((bitField0_ & 0x00000008) == 0x00000008);
8752         }
8753         /**
8754          * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
8755          */
8756         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
8757           return deleteType_;
8758         }
8759 
8760         // optional bytes tags = 5;
8761         public static final int TAGS_FIELD_NUMBER = 5;
8762         private com.google.protobuf.ByteString tags_;
8763         /**
8764          * <code>optional bytes tags = 5;</code>
8765          */
8766         public boolean hasTags() {
8767           return ((bitField0_ & 0x00000010) == 0x00000010);
8768         }
8769         /**
8770          * <code>optional bytes tags = 5;</code>
8771          */
8772         public com.google.protobuf.ByteString getTags() {
8773           return tags_;
8774         }
8775 
8776         private void initFields() {
8777           qualifier_ = com.google.protobuf.ByteString.EMPTY;
8778           value_ = com.google.protobuf.ByteString.EMPTY;
8779           timestamp_ = 0L;
8780           deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
8781           tags_ = com.google.protobuf.ByteString.EMPTY;
8782         }
8783         private byte memoizedIsInitialized = -1;
8784         public final boolean isInitialized() {
8785           byte isInitialized = memoizedIsInitialized;
8786           if (isInitialized != -1) return isInitialized == 1;
8787 
8788           memoizedIsInitialized = 1;
8789           return true;
8790         }
8791 
8792         public void writeTo(com.google.protobuf.CodedOutputStream output)
8793                             throws java.io.IOException {
8794           getSerializedSize();
8795           if (((bitField0_ & 0x00000001) == 0x00000001)) {
8796             output.writeBytes(1, qualifier_);
8797           }
8798           if (((bitField0_ & 0x00000002) == 0x00000002)) {
8799             output.writeBytes(2, value_);
8800           }
8801           if (((bitField0_ & 0x00000004) == 0x00000004)) {
8802             output.writeUInt64(3, timestamp_);
8803           }
8804           if (((bitField0_ & 0x00000008) == 0x00000008)) {
8805             output.writeEnum(4, deleteType_.getNumber());
8806           }
8807           if (((bitField0_ & 0x00000010) == 0x00000010)) {
8808             output.writeBytes(5, tags_);
8809           }
8810           getUnknownFields().writeTo(output);
8811         }
8812 
8813         private int memoizedSerializedSize = -1;
8814         public int getSerializedSize() {
8815           int size = memoizedSerializedSize;
8816           if (size != -1) return size;
8817 
8818           size = 0;
8819           if (((bitField0_ & 0x00000001) == 0x00000001)) {
8820             size += com.google.protobuf.CodedOutputStream
8821               .computeBytesSize(1, qualifier_);
8822           }
8823           if (((bitField0_ & 0x00000002) == 0x00000002)) {
8824             size += com.google.protobuf.CodedOutputStream
8825               .computeBytesSize(2, value_);
8826           }
8827           if (((bitField0_ & 0x00000004) == 0x00000004)) {
8828             size += com.google.protobuf.CodedOutputStream
8829               .computeUInt64Size(3, timestamp_);
8830           }
8831           if (((bitField0_ & 0x00000008) == 0x00000008)) {
8832             size += com.google.protobuf.CodedOutputStream
8833               .computeEnumSize(4, deleteType_.getNumber());
8834           }
8835           if (((bitField0_ & 0x00000010) == 0x00000010)) {
8836             size += com.google.protobuf.CodedOutputStream
8837               .computeBytesSize(5, tags_);
8838           }
8839           size += getUnknownFields().getSerializedSize();
8840           memoizedSerializedSize = size;
8841           return size;
8842         }
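        /*
         * Editorial note (not generated code): writeTo and getSerializedSize
         * only emit fields whose presence bit in bitField0_ is set, so unset
         * optional fields add no bytes. Serialization sketch; toByteArray() is
         * inherited from the protobuf AbstractMessageLite base class:
         *
         *   byte[] bytes = qv.toByteArray();   // qv: a built QualifierValue
         *   int n = qv.getSerializedSize();    // n == bytes.length
         */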
8843 
8844         private static final long serialVersionUID = 0L;
8845         @java.lang.Override
8846         protected java.lang.Object writeReplace()
8847             throws java.io.ObjectStreamException {
8848           return super.writeReplace();
8849         }
8850 
8851         @java.lang.Override
8852         public boolean equals(final java.lang.Object obj) {
8853           if (obj == this) {
8854             return true;
8855           }
8856           if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)) {
8857             return super.equals(obj);
8858           }
8859           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj;
8860 
8861           boolean result = true;
8862           result = result && (hasQualifier() == other.hasQualifier());
8863           if (hasQualifier()) {
8864             result = result && getQualifier()
8865                 .equals(other.getQualifier());
8866           }
8867           result = result && (hasValue() == other.hasValue());
8868           if (hasValue()) {
8869             result = result && getValue()
8870                 .equals(other.getValue());
8871           }
8872           result = result && (hasTimestamp() == other.hasTimestamp());
8873           if (hasTimestamp()) {
8874             result = result && (getTimestamp()
8875                 == other.getTimestamp());
8876           }
8877           result = result && (hasDeleteType() == other.hasDeleteType());
8878           if (hasDeleteType()) {
8879             result = result &&
8880                 (getDeleteType() == other.getDeleteType());
8881           }
8882           result = result && (hasTags() == other.hasTags());
8883           if (hasTags()) {
8884             result = result && getTags()
8885                 .equals(other.getTags());
8886           }
8887           result = result &&
8888               getUnknownFields().equals(other.getUnknownFields());
8889           return result;
8890         }
8891 
8892         private int memoizedHashCode = 0;
8893         @java.lang.Override
8894         public int hashCode() {
8895           if (memoizedHashCode != 0) {
8896             return memoizedHashCode;
8897           }
8898           int hash = 41;
8899           hash = (19 * hash) + getDescriptorForType().hashCode();
8900           if (hasQualifier()) {
8901             hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
8902             hash = (53 * hash) + getQualifier().hashCode();
8903           }
8904           if (hasValue()) {
8905             hash = (37 * hash) + VALUE_FIELD_NUMBER;
8906             hash = (53 * hash) + getValue().hashCode();
8907           }
8908           if (hasTimestamp()) {
8909             hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
8910             hash = (53 * hash) + hashLong(getTimestamp());
8911           }
8912           if (hasDeleteType()) {
8913             hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER;
8914             hash = (53 * hash) + hashEnum(getDeleteType());
8915           }
8916           if (hasTags()) {
8917             hash = (37 * hash) + TAGS_FIELD_NUMBER;
8918             hash = (53 * hash) + getTags().hashCode();
8919           }
8920           hash = (29 * hash) + getUnknownFields().hashCode();
8921           memoizedHashCode = hash;
8922           return hash;
8923         }
8924 
8925         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
8926             com.google.protobuf.ByteString data)
8927             throws com.google.protobuf.InvalidProtocolBufferException {
8928           return PARSER.parseFrom(data);
8929         }
8930         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
8931             com.google.protobuf.ByteString data,
8932             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8933             throws com.google.protobuf.InvalidProtocolBufferException {
8934           return PARSER.parseFrom(data, extensionRegistry);
8935         }
8936         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data)
8937             throws com.google.protobuf.InvalidProtocolBufferException {
8938           return PARSER.parseFrom(data);
8939         }
8940         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
8941             byte[] data,
8942             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8943             throws com.google.protobuf.InvalidProtocolBufferException {
8944           return PARSER.parseFrom(data, extensionRegistry);
8945         }
8946         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input)
8947             throws java.io.IOException {
8948           return PARSER.parseFrom(input);
8949         }
8950         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
8951             java.io.InputStream input,
8952             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8953             throws java.io.IOException {
8954           return PARSER.parseFrom(input, extensionRegistry);
8955         }
8956         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input)
8957             throws java.io.IOException {
8958           return PARSER.parseDelimitedFrom(input);
8959         }
8960         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(
8961             java.io.InputStream input,
8962             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8963             throws java.io.IOException {
8964           return PARSER.parseDelimitedFrom(input, extensionRegistry);
8965         }
8966         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
8967             com.google.protobuf.CodedInputStream input)
8968             throws java.io.IOException {
8969           return PARSER.parseFrom(input);
8970         }
8971         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
8972             com.google.protobuf.CodedInputStream input,
8973             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8974             throws java.io.IOException {
8975           return PARSER.parseFrom(input, extensionRegistry);
8976         }
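        /*
         * Editorial usage sketch (not generated code): deserializing a
         * QualifierValue from raw bytes; data is an illustrative byte[] that
         * must hold a well-formed message, otherwise
         * InvalidProtocolBufferException is thrown.
         *
         *   byte[] data = ...; // bytes of a serialized QualifierValue
         *   MutationProto.ColumnValue.QualifierValue qv =
         *       MutationProto.ColumnValue.QualifierValue.parseFrom(data);
         */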
8977 
8978         public static Builder newBuilder() { return Builder.create(); }
8979         public Builder newBuilderForType() { return newBuilder(); }
8980         public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) {
8981           return newBuilder().mergeFrom(prototype);
8982         }
8983         public Builder toBuilder() { return newBuilder(this); }
8984 
8985         @java.lang.Override
8986         protected Builder newBuilderForType(
8987             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8988           Builder builder = new Builder(parent);
8989           return builder;
8990         }
8991         /**
8992          * Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue}
8993          */
8994         public static final class Builder extends
8995             com.google.protobuf.GeneratedMessage.Builder<Builder>
8996            implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder {
8997           public static final com.google.protobuf.Descriptors.Descriptor
8998               getDescriptor() {
8999             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
9000           }
9001 
9002           protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9003               internalGetFieldAccessorTable() {
9004             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
9005                 .ensureFieldAccessorsInitialized(
9006                     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
9007           }
9008 
9009           // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder()
9010           private Builder() {
9011             maybeForceBuilderInitialization();
9012           }
9013 
9014           private Builder(
9015               com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9016             super(parent);
9017             maybeForceBuilderInitialization();
9018           }
9019           private void maybeForceBuilderInitialization() {
9020             if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9021             }
9022           }
9023           private static Builder create() {
9024             return new Builder();
9025           }
9026 
9027           public Builder clear() {
9028             super.clear();
9029             qualifier_ = com.google.protobuf.ByteString.EMPTY;
9030             bitField0_ = (bitField0_ & ~0x00000001);
9031             value_ = com.google.protobuf.ByteString.EMPTY;
9032             bitField0_ = (bitField0_ & ~0x00000002);
9033             timestamp_ = 0L;
9034             bitField0_ = (bitField0_ & ~0x00000004);
9035             deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9036             bitField0_ = (bitField0_ & ~0x00000008);
9037             tags_ = com.google.protobuf.ByteString.EMPTY;
9038             bitField0_ = (bitField0_ & ~0x00000010);
9039             return this;
9040           }
9041 
9042           public Builder clone() {
9043             return create().mergeFrom(buildPartial());
9044           }
9045 
9046           public com.google.protobuf.Descriptors.Descriptor
9047               getDescriptorForType() {
9048             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
9049           }
9050 
9051           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() {
9052             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance();
9053           }
9054 
9055           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() {
9056             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial();
9057             if (!result.isInitialized()) {
9058               throw newUninitializedMessageException(result);
9059             }
9060             return result;
9061           }
9062 
9063           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() {
9064             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this);
9065             int from_bitField0_ = bitField0_;
9066             int to_bitField0_ = 0;
9067             if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9068               to_bitField0_ |= 0x00000001;
9069             }
9070             result.qualifier_ = qualifier_;
9071             if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
9072               to_bitField0_ |= 0x00000002;
9073             }
9074             result.value_ = value_;
9075             if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
9076               to_bitField0_ |= 0x00000004;
9077             }
9078             result.timestamp_ = timestamp_;
9079             if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
9080               to_bitField0_ |= 0x00000008;
9081             }
9082             result.deleteType_ = deleteType_;
9083             if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
9084               to_bitField0_ |= 0x00000010;
9085             }
9086             result.tags_ = tags_;
9087             result.bitField0_ = to_bitField0_;
9088             onBuilt();
9089             return result;
9090           }
9091 
9092           public Builder mergeFrom(com.google.protobuf.Message other) {
9093             if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) {
9094               return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other);
9095             } else {
9096               super.mergeFrom(other);
9097               return this;
9098             }
9099           }
9100 
9101           public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) {
9102             if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this;
9103             if (other.hasQualifier()) {
9104               setQualifier(other.getQualifier());
9105             }
9106             if (other.hasValue()) {
9107               setValue(other.getValue());
9108             }
9109             if (other.hasTimestamp()) {
9110               setTimestamp(other.getTimestamp());
9111             }
9112             if (other.hasDeleteType()) {
9113               setDeleteType(other.getDeleteType());
9114             }
9115             if (other.hasTags()) {
9116               setTags(other.getTags());
9117             }
9118             this.mergeUnknownFields(other.getUnknownFields());
9119             return this;
9120           }
9121 
9122           public final boolean isInitialized() {
9123             return true;
9124           }
9125 
9126           public Builder mergeFrom(
9127               com.google.protobuf.CodedInputStream input,
9128               com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9129               throws java.io.IOException {
9130             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null;
9131             try {
9132               parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9133             } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9134               parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage();
9135               throw e;
9136             } finally {
9137               if (parsedMessage != null) {
9138                 mergeFrom(parsedMessage);
9139               }
9140             }
9141             return this;
9142           }
9143           private int bitField0_;
9144 
9145           // optional bytes qualifier = 1;
9146           private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
9147           /**
9148            * <code>optional bytes qualifier = 1;</code>
9149            */
9150           public boolean hasQualifier() {
9151             return ((bitField0_ & 0x00000001) == 0x00000001);
9152           }
9153           /**
9154            * <code>optional bytes qualifier = 1;</code>
9155            */
9156           public com.google.protobuf.ByteString getQualifier() {
9157             return qualifier_;
9158           }
9159           /**
9160            * <code>optional bytes qualifier = 1;</code>
9161            */
9162           public Builder setQualifier(com.google.protobuf.ByteString value) {
9163             if (value == null) {
9164               throw new NullPointerException();
9165             }
9166             bitField0_ |= 0x00000001;
9167             qualifier_ = value;
9168             onChanged();
9169             return this;
9170           }
9171           /**
9172            * <code>optional bytes qualifier = 1;</code>
9173            */
9174           public Builder clearQualifier() {
9175             bitField0_ = (bitField0_ & ~0x00000001);
9176             qualifier_ = getDefaultInstance().getQualifier();
9177             onChanged();
9178             return this;
9179           }
9180 
9181           // optional bytes value = 2;
9182           private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
9183           /**
9184            * <code>optional bytes value = 2;</code>
9185            */
9186           public boolean hasValue() {
9187             return ((bitField0_ & 0x00000002) == 0x00000002);
9188           }
9189           /**
9190            * <code>optional bytes value = 2;</code>
9191            */
9192           public com.google.protobuf.ByteString getValue() {
9193             return value_;
9194           }
9195           /**
9196            * <code>optional bytes value = 2;</code>
9197            */
9198           public Builder setValue(com.google.protobuf.ByteString value) {
9199             if (value == null) {
9200               throw new NullPointerException();
9201             }
9202             bitField0_ |= 0x00000002;
9203             value_ = value;
9204             onChanged();
9205             return this;
9206           }
9207           /**
9208            * <code>optional bytes value = 2;</code>
9209            */
9210           public Builder clearValue() {
9211             bitField0_ = (bitField0_ & ~0x00000002);
9212             value_ = getDefaultInstance().getValue();
9213             onChanged();
9214             return this;
9215           }
9216 
9217           // optional uint64 timestamp = 3;
9218           private long timestamp_;
9219           /**
9220            * <code>optional uint64 timestamp = 3;</code>
9221            */
9222           public boolean hasTimestamp() {
9223             return ((bitField0_ & 0x00000004) == 0x00000004);
9224           }
9225           /**
9226            * <code>optional uint64 timestamp = 3;</code>
9227            */
9228           public long getTimestamp() {
9229             return timestamp_;
9230           }
9231           /**
9232            * <code>optional uint64 timestamp = 3;</code>
9233            */
9234           public Builder setTimestamp(long value) {
9235             bitField0_ |= 0x00000004;
9236             timestamp_ = value;
9237             onChanged();
9238             return this;
9239           }
9240           /**
9241            * <code>optional uint64 timestamp = 3;</code>
9242            */
9243           public Builder clearTimestamp() {
9244             bitField0_ = (bitField0_ & ~0x00000004);
9245             timestamp_ = 0L;
9246             onChanged();
9247             return this;
9248           }
9249 
9250           // optional .hbase.pb.MutationProto.DeleteType delete_type = 4;
9251           private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9252           /**
9253            * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
9254            */
9255           public boolean hasDeleteType() {
9256             return ((bitField0_ & 0x00000008) == 0x00000008);
9257           }
9258           /**
9259            * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
9260            */
9261           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
9262             return deleteType_;
9263           }
9264           /**
9265            * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
9266            */
9267           public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value) {
9268             if (value == null) {
9269               throw new NullPointerException();
9270             }
9271             bitField0_ |= 0x00000008;
9272             deleteType_ = value;
9273             onChanged();
9274             return this;
9275           }
9276           /**
9277            * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
9278            */
9279           public Builder clearDeleteType() {
9280             bitField0_ = (bitField0_ & ~0x00000008);
9281             deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9282             onChanged();
9283             return this;
9284           }
9285 
9286           // optional bytes tags = 5;
9287           private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
9288           /**
9289            * <code>optional bytes tags = 5;</code>
9290            */
9291           public boolean hasTags() {
9292             return ((bitField0_ & 0x00000010) == 0x00000010);
9293           }
9294           /**
9295            * <code>optional bytes tags = 5;</code>
9296            */
9297           public com.google.protobuf.ByteString getTags() {
9298             return tags_;
9299           }
9300           /**
9301            * <code>optional bytes tags = 5;</code>
9302            */
9303           public Builder setTags(com.google.protobuf.ByteString value) {
9304             if (value == null) {
9305               throw new NullPointerException();
9306             }
9307             bitField0_ |= 0x00000010;
9308             tags_ = value;
9309             onChanged();
9310             return this;
9311           }
9312           /**
9313            * <code>optional bytes tags = 5;</code>
9314            */
9315           public Builder clearTags() {
9316             bitField0_ = (bitField0_ & ~0x00000010);
9317             tags_ = getDefaultInstance().getTags();
9318             onChanged();
9319             return this;
9320           }
9321 
9322           // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue)
9323         }
9324 
9325         static {
9326           defaultInstance = new QualifierValue(true);
9327           defaultInstance.initFields();
9328         }
9329 
9330         // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue)
9331       }
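      /*
       * Editorial usage sketch (not generated code): constructing a
       * QualifierValue with its Builder; the literal qualifier/value strings
       * and the timestamp are illustrative assumptions.
       *
       *   MutationProto.ColumnValue.QualifierValue qv =
       *       MutationProto.ColumnValue.QualifierValue.newBuilder()
       *           .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
       *           .setValue(com.google.protobuf.ByteString.copyFromUtf8("v1"))
       *           .setTimestamp(1234567890L)
       *           .build();
       */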
9332 
9333       private int bitField0_;
9334       // required bytes family = 1;
9335       public static final int FAMILY_FIELD_NUMBER = 1;
9336       private com.google.protobuf.ByteString family_;
9337       /**
9338        * <code>required bytes family = 1;</code>
9339        */
9340       public boolean hasFamily() {
9341         return ((bitField0_ & 0x00000001) == 0x00000001);
9342       }
9343       /**
9344        * <code>required bytes family = 1;</code>
9345        */
9346       public com.google.protobuf.ByteString getFamily() {
9347         return family_;
9348       }
9349 
9350       // repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
9351       public static final int QUALIFIER_VALUE_FIELD_NUMBER = 2;
9352       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_;
9353       /**
9354        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9355        */
9356       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
9357         return qualifierValue_;
9358       }
9359       /**
9360        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9361        */
9362       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
9363           getQualifierValueOrBuilderList() {
9364         return qualifierValue_;
9365       }
9366       /**
9367        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9368        */
9369       public int getQualifierValueCount() {
9370         return qualifierValue_.size();
9371       }
9372       /**
9373        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9374        */
9375       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
9376         return qualifierValue_.get(index);
9377       }
9378       /**
9379        * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9380        */
9381       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
9382           int index) {
9383         return qualifierValue_.get(index);
9384       }
9385 
9386       private void initFields() {
9387         family_ = com.google.protobuf.ByteString.EMPTY;
9388         qualifierValue_ = java.util.Collections.emptyList();
9389       }
9390       private byte memoizedIsInitialized = -1;
9391       public final boolean isInitialized() {
9392         byte isInitialized = memoizedIsInitialized;
9393         if (isInitialized != -1) return isInitialized == 1;
9394 
9395         if (!hasFamily()) {
9396           memoizedIsInitialized = 0;
9397           return false;
9398         }
9399         memoizedIsInitialized = 1;
9400         return true;
9401       }
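      /*
       * Editorial note (not generated code): family is the only required field,
       * so isInitialized() stays false until it is set and the generated
       * Builder.build() rejects such a message. Sketch; setFamily is the
       * standard generated setter for the required bytes field:
       *
       *   MutationProto.ColumnValue cv = MutationProto.ColumnValue.newBuilder()
       *       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
       *       .build();
       */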
9402 
9403       public void writeTo(com.google.protobuf.CodedOutputStream output)
9404                           throws java.io.IOException {
9405         getSerializedSize();
9406         if (((bitField0_ & 0x00000001) == 0x00000001)) {
9407           output.writeBytes(1, family_);
9408         }
9409         for (int i = 0; i < qualifierValue_.size(); i++) {
9410           output.writeMessage(2, qualifierValue_.get(i));
9411         }
9412         getUnknownFields().writeTo(output);
9413       }
9414 
9415       private int memoizedSerializedSize = -1;
9416       public int getSerializedSize() {
9417         int size = memoizedSerializedSize;
9418         if (size != -1) return size;
9419 
9420         size = 0;
9421         if (((bitField0_ & 0x00000001) == 0x00000001)) {
9422           size += com.google.protobuf.CodedOutputStream
9423             .computeBytesSize(1, family_);
9424         }
9425         for (int i = 0; i < qualifierValue_.size(); i++) {
9426           size += com.google.protobuf.CodedOutputStream
9427             .computeMessageSize(2, qualifierValue_.get(i));
9428         }
9429         size += getUnknownFields().getSerializedSize();
9430         memoizedSerializedSize = size;
9431         return size;
9432       }
9433 
9434       private static final long serialVersionUID = 0L;
9435       @java.lang.Override
9436       protected java.lang.Object writeReplace()
9437           throws java.io.ObjectStreamException {
9438         return super.writeReplace();
9439       }
9440 
9441       @java.lang.Override
9442       public boolean equals(final java.lang.Object obj) {
9443         if (obj == this) {
9444           return true;
9445         }
9446         if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)) {
9447           return super.equals(obj);
9448         }
9449         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj;
9450 
9451         boolean result = true;
9452         result = result && (hasFamily() == other.hasFamily());
9453         if (hasFamily()) {
9454           result = result && getFamily()
9455               .equals(other.getFamily());
9456         }
9457         result = result && getQualifierValueList()
9458             .equals(other.getQualifierValueList());
9459         result = result &&
9460             getUnknownFields().equals(other.getUnknownFields());
9461         return result;
9462       }
9463 
9464       private int memoizedHashCode = 0;
9465       @java.lang.Override
9466       public int hashCode() {
9467         if (memoizedHashCode != 0) {
9468           return memoizedHashCode;
9469         }
9470         int hash = 41;
9471         hash = (19 * hash) + getDescriptorForType().hashCode();
9472         if (hasFamily()) {
9473           hash = (37 * hash) + FAMILY_FIELD_NUMBER;
9474           hash = (53 * hash) + getFamily().hashCode();
9475         }
9476         if (getQualifierValueCount() > 0) {
9477           hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER;
9478           hash = (53 * hash) + getQualifierValueList().hashCode();
9479         }
9480         hash = (29 * hash) + getUnknownFields().hashCode();
9481         memoizedHashCode = hash;
9482         return hash;
9483       }
9484 
9485       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9486           com.google.protobuf.ByteString data)
9487           throws com.google.protobuf.InvalidProtocolBufferException {
9488         return PARSER.parseFrom(data);
9489       }
9490       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9491           com.google.protobuf.ByteString data,
9492           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9493           throws com.google.protobuf.InvalidProtocolBufferException {
9494         return PARSER.parseFrom(data, extensionRegistry);
9495       }
9496       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data)
9497           throws com.google.protobuf.InvalidProtocolBufferException {
9498         return PARSER.parseFrom(data);
9499       }
9500       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9501           byte[] data,
9502           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9503           throws com.google.protobuf.InvalidProtocolBufferException {
9504         return PARSER.parseFrom(data, extensionRegistry);
9505       }
9506       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input)
9507           throws java.io.IOException {
9508         return PARSER.parseFrom(input);
9509       }
9510       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9511           java.io.InputStream input,
9512           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9513           throws java.io.IOException {
9514         return PARSER.parseFrom(input, extensionRegistry);
9515       }
9516       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input)
9517           throws java.io.IOException {
9518         return PARSER.parseDelimitedFrom(input);
9519       }
9520       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(
9521           java.io.InputStream input,
9522           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9523           throws java.io.IOException {
9524         return PARSER.parseDelimitedFrom(input, extensionRegistry);
9525       }
9526       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9527           com.google.protobuf.CodedInputStream input)
9528           throws java.io.IOException {
9529         return PARSER.parseFrom(input);
9530       }
9531       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9532           com.google.protobuf.CodedInputStream input,
9533           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9534           throws java.io.IOException {
9535         return PARSER.parseFrom(input, extensionRegistry);
9536       }
9537 
9538       public static Builder newBuilder() { return Builder.create(); }
9539       public Builder newBuilderForType() { return newBuilder(); }
9540       public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) {
9541         return newBuilder().mergeFrom(prototype);
9542       }
9543       public Builder toBuilder() { return newBuilder(this); }
9544 
9545       @java.lang.Override
9546       protected Builder newBuilderForType(
9547           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9548         Builder builder = new Builder(parent);
9549         return builder;
9550       }
9551       /**
9552        * Protobuf type {@code hbase.pb.MutationProto.ColumnValue}
9553        */
9554       public static final class Builder extends
9555           com.google.protobuf.GeneratedMessage.Builder<Builder>
9556          implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder {
9557         public static final com.google.protobuf.Descriptors.Descriptor
9558             getDescriptor() {
9559           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
9560         }
9561 
9562         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9563             internalGetFieldAccessorTable() {
9564           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable
9565               .ensureFieldAccessorsInitialized(
9566                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
9567         }
9568 
9569         // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder()
9570         private Builder() {
9571           maybeForceBuilderInitialization();
9572         }
9573 
9574         private Builder(
9575             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9576           super(parent);
9577           maybeForceBuilderInitialization();
9578         }
9579         private void maybeForceBuilderInitialization() {
9580           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9581             getQualifierValueFieldBuilder();
9582           }
9583         }
9584         private static Builder create() {
9585           return new Builder();
9586         }
9587 
9588         public Builder clear() {
9589           super.clear();
9590           family_ = com.google.protobuf.ByteString.EMPTY;
9591           bitField0_ = (bitField0_ & ~0x00000001);
9592           if (qualifierValueBuilder_ == null) {
9593             qualifierValue_ = java.util.Collections.emptyList();
9594             bitField0_ = (bitField0_ & ~0x00000002);
9595           } else {
9596             qualifierValueBuilder_.clear();
9597           }
9598           return this;
9599         }
9600 
9601         public Builder clone() {
9602           return create().mergeFrom(buildPartial());
9603         }
9604 
9605         public com.google.protobuf.Descriptors.Descriptor
9606             getDescriptorForType() {
9607           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
9608         }
9609 
9610         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() {
9611           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance();
9612         }
9613 
9614         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() {
9615           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial();
9616           if (!result.isInitialized()) {
9617             throw newUninitializedMessageException(result);
9618           }
9619           return result;
9620         }
9621 
9622         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() {
9623           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this);
9624           int from_bitField0_ = bitField0_;
9625           int to_bitField0_ = 0;
9626           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9627             to_bitField0_ |= 0x00000001;
9628           }
9629           result.family_ = family_;
9630           if (qualifierValueBuilder_ == null) {
9631             if (((bitField0_ & 0x00000002) == 0x00000002)) {
9632               qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
9633               bitField0_ = (bitField0_ & ~0x00000002);
9634             }
9635             result.qualifierValue_ = qualifierValue_;
9636           } else {
9637             result.qualifierValue_ = qualifierValueBuilder_.build();
9638           }
9639           result.bitField0_ = to_bitField0_;
9640           onBuilt();
9641           return result;
9642         }
9643 
9644         public Builder mergeFrom(com.google.protobuf.Message other) {
9645           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) {
9646             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other);
9647           } else {
9648             super.mergeFrom(other);
9649             return this;
9650           }
9651         }
9652 
9653         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) {
9654           if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) return this;
9655           if (other.hasFamily()) {
9656             setFamily(other.getFamily());
9657           }
9658           if (qualifierValueBuilder_ == null) {
9659             if (!other.qualifierValue_.isEmpty()) {
9660               if (qualifierValue_.isEmpty()) {
9661                 qualifierValue_ = other.qualifierValue_;
9662                 bitField0_ = (bitField0_ & ~0x00000002);
9663               } else {
9664                 ensureQualifierValueIsMutable();
9665                 qualifierValue_.addAll(other.qualifierValue_);
9666               }
9667               onChanged();
9668             }
9669           } else {
9670             if (!other.qualifierValue_.isEmpty()) {
9671               if (qualifierValueBuilder_.isEmpty()) {
9672                 qualifierValueBuilder_.dispose();
9673                 qualifierValueBuilder_ = null;
9674                 qualifierValue_ = other.qualifierValue_;
9675                 bitField0_ = (bitField0_ & ~0x00000002);
9676                 qualifierValueBuilder_ = 
9677                   com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
9678                      getQualifierValueFieldBuilder() : null;
9679               } else {
9680                 qualifierValueBuilder_.addAllMessages(other.qualifierValue_);
9681               }
9682             }
9683           }
9684           this.mergeUnknownFields(other.getUnknownFields());
9685           return this;
9686         }
9687 
9688         public final boolean isInitialized() {
9689           if (!hasFamily()) {
9690             
9691             return false;
9692           }
9693           return true;
9694         }
9695 
9696         public Builder mergeFrom(
9697             com.google.protobuf.CodedInputStream input,
9698             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9699             throws java.io.IOException {
9700           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null;
9701           try {
9702             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9703           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9704             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage();
9705             throw e;
9706           } finally {
9707             if (parsedMessage != null) {
9708               mergeFrom(parsedMessage);
9709             }
9710           }
9711           return this;
9712         }
9713         private int bitField0_;
9714 
9715         // required bytes family = 1;
9716         private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
9717         /**
9718          * <code>required bytes family = 1;</code>
9719          */
9720         public boolean hasFamily() {
9721           return ((bitField0_ & 0x00000001) == 0x00000001);
9722         }
9723         /**
9724          * <code>required bytes family = 1;</code>
9725          */
9726         public com.google.protobuf.ByteString getFamily() {
9727           return family_;
9728         }
9729         /**
9730          * <code>required bytes family = 1;</code>
9731          */
9732         public Builder setFamily(com.google.protobuf.ByteString value) {
9733           if (value == null) {
9734             throw new NullPointerException();
9735           }
9736           bitField0_ |= 0x00000001;
9737           family_ = value;
9738           onChanged();
9739           return this;
9740         }
9741         /**
9742          * <code>required bytes family = 1;</code>
9743          */
9744         public Builder clearFamily() {
9745           bitField0_ = (bitField0_ & ~0x00000001);
9746           family_ = getDefaultInstance().getFamily();
9747           onChanged();
9748           return this;
9749         }
9750 
9751         // repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
9752         private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_ =
9753           java.util.Collections.emptyList();
9754         private void ensureQualifierValueIsMutable() {
9755           if (!((bitField0_ & 0x00000002) == 0x00000002)) {
9756             qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(qualifierValue_);
9757             bitField0_ |= 0x00000002;
9758            }
9759         }
9760 
9761         private com.google.protobuf.RepeatedFieldBuilder<
9762             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_;
9763 
9764         /**
9765          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9766          */
9767         public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
9768           if (qualifierValueBuilder_ == null) {
9769             return java.util.Collections.unmodifiableList(qualifierValue_);
9770           } else {
9771             return qualifierValueBuilder_.getMessageList();
9772           }
9773         }
9774         /**
9775          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9776          */
9777         public int getQualifierValueCount() {
9778           if (qualifierValueBuilder_ == null) {
9779             return qualifierValue_.size();
9780           } else {
9781             return qualifierValueBuilder_.getCount();
9782           }
9783         }
9784         /**
9785          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9786          */
9787         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
9788           if (qualifierValueBuilder_ == null) {
9789             return qualifierValue_.get(index);
9790           } else {
9791             return qualifierValueBuilder_.getMessage(index);
9792           }
9793         }
9794         /**
9795          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9796          */
9797         public Builder setQualifierValue(
9798             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
9799           if (qualifierValueBuilder_ == null) {
9800             if (value == null) {
9801               throw new NullPointerException();
9802             }
9803             ensureQualifierValueIsMutable();
9804             qualifierValue_.set(index, value);
9805             onChanged();
9806           } else {
9807             qualifierValueBuilder_.setMessage(index, value);
9808           }
9809           return this;
9810         }
9811         /**
9812          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9813          */
9814         public Builder setQualifierValue(
9815             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
9816           if (qualifierValueBuilder_ == null) {
9817             ensureQualifierValueIsMutable();
9818             qualifierValue_.set(index, builderForValue.build());
9819             onChanged();
9820           } else {
9821             qualifierValueBuilder_.setMessage(index, builderForValue.build());
9822           }
9823           return this;
9824         }
9825         /**
9826          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9827          */
9828         public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
9829           if (qualifierValueBuilder_ == null) {
9830             if (value == null) {
9831               throw new NullPointerException();
9832             }
9833             ensureQualifierValueIsMutable();
9834             qualifierValue_.add(value);
9835             onChanged();
9836           } else {
9837             qualifierValueBuilder_.addMessage(value);
9838           }
9839           return this;
9840         }
9841         /**
9842          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9843          */
9844         public Builder addQualifierValue(
9845             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
9846           if (qualifierValueBuilder_ == null) {
9847             if (value == null) {
9848               throw new NullPointerException();
9849             }
9850             ensureQualifierValueIsMutable();
9851             qualifierValue_.add(index, value);
9852             onChanged();
9853           } else {
9854             qualifierValueBuilder_.addMessage(index, value);
9855           }
9856           return this;
9857         }
9858         /**
9859          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9860          */
9861         public Builder addQualifierValue(
9862             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
9863           if (qualifierValueBuilder_ == null) {
9864             ensureQualifierValueIsMutable();
9865             qualifierValue_.add(builderForValue.build());
9866             onChanged();
9867           } else {
9868             qualifierValueBuilder_.addMessage(builderForValue.build());
9869           }
9870           return this;
9871         }
9872         /**
9873          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9874          */
9875         public Builder addQualifierValue(
9876             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
9877           if (qualifierValueBuilder_ == null) {
9878             ensureQualifierValueIsMutable();
9879             qualifierValue_.add(index, builderForValue.build());
9880             onChanged();
9881           } else {
9882             qualifierValueBuilder_.addMessage(index, builderForValue.build());
9883           }
9884           return this;
9885         }
9886         /**
9887          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9888          */
9889         public Builder addAllQualifierValue(
9890             java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values) {
9891           if (qualifierValueBuilder_ == null) {
9892             ensureQualifierValueIsMutable();
9893             super.addAll(values, qualifierValue_);
9894             onChanged();
9895           } else {
9896             qualifierValueBuilder_.addAllMessages(values);
9897           }
9898           return this;
9899         }
9900         /**
9901          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9902          */
9903         public Builder clearQualifierValue() {
9904           if (qualifierValueBuilder_ == null) {
9905             qualifierValue_ = java.util.Collections.emptyList();
9906             bitField0_ = (bitField0_ & ~0x00000002);
9907             onChanged();
9908           } else {
9909             qualifierValueBuilder_.clear();
9910           }
9911           return this;
9912         }
9913         /**
9914          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9915          */
9916         public Builder removeQualifierValue(int index) {
9917           if (qualifierValueBuilder_ == null) {
9918             ensureQualifierValueIsMutable();
9919             qualifierValue_.remove(index);
9920             onChanged();
9921           } else {
9922             qualifierValueBuilder_.remove(index);
9923           }
9924           return this;
9925         }
9926         /**
9927          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9928          */
9929         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder(
9930             int index) {
9931           return getQualifierValueFieldBuilder().getBuilder(index);
9932         }
9933         /**
9934          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9935          */
9936         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
9937             int index) {
9938           if (qualifierValueBuilder_ == null) {
9939             return qualifierValue_.get(index);  } else {
9940             return qualifierValueBuilder_.getMessageOrBuilder(index);
9941           }
9942         }
9943         /**
9944          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9945          */
9946         public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
9947              getQualifierValueOrBuilderList() {
9948           if (qualifierValueBuilder_ != null) {
9949             return qualifierValueBuilder_.getMessageOrBuilderList();
9950           } else {
9951             return java.util.Collections.unmodifiableList(qualifierValue_);
9952           }
9953         }
9954         /**
9955          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9956          */
9957         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() {
9958           return getQualifierValueFieldBuilder().addBuilder(
9959               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
9960         }
9961         /**
9962          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9963          */
9964         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder(
9965             int index) {
9966           return getQualifierValueFieldBuilder().addBuilder(
9967               index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
9968         }
9969         /**
9970          * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9971          */
9972         public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder> 
9973              getQualifierValueBuilderList() {
9974           return getQualifierValueFieldBuilder().getBuilderList();
9975         }
9976         private com.google.protobuf.RepeatedFieldBuilder<
9977             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
9978             getQualifierValueFieldBuilder() {
9979           if (qualifierValueBuilder_ == null) {
9980             qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
9981                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>(
9982                     qualifierValue_,
9983                     ((bitField0_ & 0x00000002) == 0x00000002),
9984                     getParentForChildren(),
9985                     isClean());
9986             qualifierValue_ = null;
9987           }
9988           return qualifierValueBuilder_;
9989         }
9990 
9991         // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue)
9992       }
9993 
9994       static {
9995         defaultInstance = new ColumnValue(true);
9996         defaultInstance.initFields();
9997       }
9998 
9999       // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue)
10000     }
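
    // Editorial example (not generated by protoc): a minimal sketch of building
    // a ColumnValue with the Builder defined above.  The family/qualifier/value
    // literals are hypothetical, and setQualifier/setValue are assumed to be the
    // usual protoc-generated setters for the QualifierValue fields.
    private static ColumnValue exampleColumnValue() {
      return ColumnValue.newBuilder()
          .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
          .addQualifierValue(ColumnValue.QualifierValue.newBuilder()
              .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
              .setValue(com.google.protobuf.ByteString.copyFromUtf8("v")))
          .build();
    }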
10001 
10002     private int bitField0_;
10003     // optional bytes row = 1;
10004     public static final int ROW_FIELD_NUMBER = 1;
10005     private com.google.protobuf.ByteString row_;
10006     /**
10007      * <code>optional bytes row = 1;</code>
10008      */
10009     public boolean hasRow() {
10010       return ((bitField0_ & 0x00000001) == 0x00000001);
10011     }
10012     /**
10013      * <code>optional bytes row = 1;</code>
10014      */
10015     public com.google.protobuf.ByteString getRow() {
10016       return row_;
10017     }
10018 
10019     // optional .hbase.pb.MutationProto.MutationType mutate_type = 2;
10020     public static final int MUTATE_TYPE_FIELD_NUMBER = 2;
10021     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_;
10022     /**
10023      * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
10024      */
10025     public boolean hasMutateType() {
10026       return ((bitField0_ & 0x00000002) == 0x00000002);
10027     }
10028     /**
10029      * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
10030      */
10031     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
10032       return mutateType_;
10033     }
10034 
10035     // repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;
10036     public static final int COLUMN_VALUE_FIELD_NUMBER = 3;
10037     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_;
10038     /**
10039      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10040      */
10041     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
10042       return columnValue_;
10043     }
10044     /**
10045      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10046      */
10047     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
10048         getColumnValueOrBuilderList() {
10049       return columnValue_;
10050     }
10051     /**
10052      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10053      */
10054     public int getColumnValueCount() {
10055       return columnValue_.size();
10056     }
10057     /**
10058      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10059      */
10060     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
10061       return columnValue_.get(index);
10062     }
10063     /**
10064      * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10065      */
10066     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
10067         int index) {
10068       return columnValue_.get(index);
10069     }
10070 
10071     // optional uint64 timestamp = 4;
10072     public static final int TIMESTAMP_FIELD_NUMBER = 4;
10073     private long timestamp_;
10074     /**
10075      * <code>optional uint64 timestamp = 4;</code>
10076      */
10077     public boolean hasTimestamp() {
10078       return ((bitField0_ & 0x00000004) == 0x00000004);
10079     }
10080     /**
10081      * <code>optional uint64 timestamp = 4;</code>
10082      */
10083     public long getTimestamp() {
10084       return timestamp_;
10085     }
10086 
10087     // repeated .hbase.pb.NameBytesPair attribute = 5;
10088     public static final int ATTRIBUTE_FIELD_NUMBER = 5;
10089     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
10090     /**
10091      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
10092      */
10093     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
10094       return attribute_;
10095     }
10096     /**
10097      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
10098      */
10099     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
10100         getAttributeOrBuilderList() {
10101       return attribute_;
10102     }
10103     /**
10104      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
10105      */
10106     public int getAttributeCount() {
10107       return attribute_.size();
10108     }
10109     /**
10110      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
10111      */
10112     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
10113       return attribute_.get(index);
10114     }
10115     /**
10116      * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
10117      */
10118     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
10119         int index) {
10120       return attribute_.get(index);
10121     }
10122 
10123     // optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];
10124     public static final int DURABILITY_FIELD_NUMBER = 6;
10125     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_;
10126     /**
10127      * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
10128      */
10129     public boolean hasDurability() {
10130       return ((bitField0_ & 0x00000008) == 0x00000008);
10131     }
10132     /**
10133      * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
10134      */
10135     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
10136       return durability_;
10137     }
10138 
10139     // optional .hbase.pb.TimeRange time_range = 7;
10140     public static final int TIME_RANGE_FIELD_NUMBER = 7;
10141     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
10142     /**
10143      * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
10144      *
10145      * <pre>
10146      * For some mutations, a result may be returned, in which case,
10147      * For some mutations, a result may be returned, in which case a
10148      * time range can be specified for a potential performance gain.
10149      */
10150     public boolean hasTimeRange() {
10151       return ((bitField0_ & 0x00000010) == 0x00000010);
10152     }
10153     /**
10154      * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
10155      *
10156      * <pre>
10157      * For some mutations, a result may be returned, in which case a
10158      * time range can be specified for a potential performance gain.
10159      * </pre>
10160      */
10161     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
10162       return timeRange_;
10163     }
10164     /**
10165      * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
10166      *
10167      * <pre>
10168      * For some mutations, a result may be returned, in which case a
10169      * time range can be specified for a potential performance gain.
10170      * </pre>
10171      */
10172     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
10173       return timeRange_;
10174     }
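
    // Editorial example (not generated by protoc): a minimal sketch of the
    // time_range usage described above.  Assumes HBaseProtos.TimeRange exposes
    // the usual protoc setters for its from/to fields; attaching the range to a
    // mutation would go through the Builder's setter for field 7 further below.
    private static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange
        exampleTimeRange(long from, long to) {
      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder()
          .setFrom(from)   // lower bound of the range
          .setTo(to)       // upper bound of the range
          .build();
    }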
10175 
10176     // optional int32 associated_cell_count = 8;
10177     public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8;
10178     private int associatedCellCount_;
10179     /**
10180      * <code>optional int32 associated_cell_count = 8;</code>
10181      *
10182      * <pre>
10183      * This count is set when the associated cells are NOT part of this
10184      * protobuf message; they are passed alongside, and this message is then
10185      * only a placeholder carrying metadata.  The count is needed to know
10186      * how many cells to peel off the accompanying block of Cells as ours.
10187      * NOTE: This is different from the pb-managed cell_count of the 'cell'
10188      * field above, which is non-null when the cells are pb'd.
10189      * </pre>
10190      */
10191     public boolean hasAssociatedCellCount() {
10192       return ((bitField0_ & 0x00000020) == 0x00000020);
10193     }
10194     /**
10195      * <code>optional int32 associated_cell_count = 8;</code>
10196      *
10197      * <pre>
10198      * This count is set when the associated cells are NOT part of this
10199      * protobuf message; they are passed alongside, and this message is then
10200      * only a placeholder carrying metadata.  The count is needed to know
10201      * how many cells to peel off the accompanying block of Cells as ours.
10202      * NOTE: This is different from the pb-managed cell_count of the 'cell'
10203      * field above, which is non-null when the cells are pb'd.
10204      * </pre>
10205      */
10206     public int getAssociatedCellCount() {
10207       return associatedCellCount_;
10208     }
10209 
10210     // optional uint64 nonce = 9;
10211     public static final int NONCE_FIELD_NUMBER = 9;
10212     private long nonce_;
10213     /**
10214      * <code>optional uint64 nonce = 9;</code>
10215      */
10216     public boolean hasNonce() {
10217       return ((bitField0_ & 0x00000040) == 0x00000040);
10218     }
10219     /**
10220      * <code>optional uint64 nonce = 9;</code>
10221      */
10222     public long getNonce() {
10223       return nonce_;
10224     }
10225 
10226     private void initFields() {
10227       row_ = com.google.protobuf.ByteString.EMPTY;
10228       mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
10229       columnValue_ = java.util.Collections.emptyList();
10230       timestamp_ = 0L;
10231       attribute_ = java.util.Collections.emptyList();
10232       durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
10233       timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
10234       associatedCellCount_ = 0;
10235       nonce_ = 0L;
10236     }
10237     private byte memoizedIsInitialized = -1;
10238     public final boolean isInitialized() {
10239       byte isInitialized = memoizedIsInitialized;
10240       if (isInitialized != -1) return isInitialized == 1;
10241 
10242       for (int i = 0; i < getColumnValueCount(); i++) {
10243         if (!getColumnValue(i).isInitialized()) {
10244           memoizedIsInitialized = 0;
10245           return false;
10246         }
10247       }
10248       for (int i = 0; i < getAttributeCount(); i++) {
10249         if (!getAttribute(i).isInitialized()) {
10250           memoizedIsInitialized = 0;
10251           return false;
10252         }
10253       }
10254       memoizedIsInitialized = 1;
10255       return true;
10256     }
10257 
10258     public void writeTo(com.google.protobuf.CodedOutputStream output)
10259                         throws java.io.IOException {
10260       getSerializedSize();
10261       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10262         output.writeBytes(1, row_);
10263       }
10264       if (((bitField0_ & 0x00000002) == 0x00000002)) {
10265         output.writeEnum(2, mutateType_.getNumber());
10266       }
10267       for (int i = 0; i < columnValue_.size(); i++) {
10268         output.writeMessage(3, columnValue_.get(i));
10269       }
10270       if (((bitField0_ & 0x00000004) == 0x00000004)) {
10271         output.writeUInt64(4, timestamp_);
10272       }
10273       for (int i = 0; i < attribute_.size(); i++) {
10274         output.writeMessage(5, attribute_.get(i));
10275       }
10276       if (((bitField0_ & 0x00000008) == 0x00000008)) {
10277         output.writeEnum(6, durability_.getNumber());
10278       }
10279       if (((bitField0_ & 0x00000010) == 0x00000010)) {
10280         output.writeMessage(7, timeRange_);
10281       }
10282       if (((bitField0_ & 0x00000020) == 0x00000020)) {
10283         output.writeInt32(8, associatedCellCount_);
10284       }
10285       if (((bitField0_ & 0x00000040) == 0x00000040)) {
10286         output.writeUInt64(9, nonce_);
10287       }
10288       getUnknownFields().writeTo(output);
10289     }
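
    // Editorial example (not generated by protoc): writeTo above is the low-level
    // serialization path; callers more typically use the inherited toByteArray()
    // together with parseFrom.  Sketch only; the helper name is hypothetical.
    private static byte[] exampleSerialize(MutationProto proto)
        throws java.io.IOException {
      java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
      com.google.protobuf.CodedOutputStream coded =
          com.google.protobuf.CodedOutputStream.newInstance(out);
      proto.writeTo(coded);   // emits each set field, exactly as in writeTo above
      coded.flush();          // push any buffered bytes to the underlying stream
      return out.toByteArray();
    }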
10290 
10291     private int memoizedSerializedSize = -1;
10292     public int getSerializedSize() {
10293       int size = memoizedSerializedSize;
10294       if (size != -1) return size;
10295 
10296       size = 0;
10297       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10298         size += com.google.protobuf.CodedOutputStream
10299           .computeBytesSize(1, row_);
10300       }
10301       if (((bitField0_ & 0x00000002) == 0x00000002)) {
10302         size += com.google.protobuf.CodedOutputStream
10303           .computeEnumSize(2, mutateType_.getNumber());
10304       }
10305       for (int i = 0; i < columnValue_.size(); i++) {
10306         size += com.google.protobuf.CodedOutputStream
10307           .computeMessageSize(3, columnValue_.get(i));
10308       }
10309       if (((bitField0_ & 0x00000004) == 0x00000004)) {
10310         size += com.google.protobuf.CodedOutputStream
10311           .computeUInt64Size(4, timestamp_);
10312       }
10313       for (int i = 0; i < attribute_.size(); i++) {
10314         size += com.google.protobuf.CodedOutputStream
10315           .computeMessageSize(5, attribute_.get(i));
10316       }
10317       if (((bitField0_ & 0x00000008) == 0x00000008)) {
10318         size += com.google.protobuf.CodedOutputStream
10319           .computeEnumSize(6, durability_.getNumber());
10320       }
10321       if (((bitField0_ & 0x00000010) == 0x00000010)) {
10322         size += com.google.protobuf.CodedOutputStream
10323           .computeMessageSize(7, timeRange_);
10324       }
10325       if (((bitField0_ & 0x00000020) == 0x00000020)) {
10326         size += com.google.protobuf.CodedOutputStream
10327           .computeInt32Size(8, associatedCellCount_);
10328       }
10329       if (((bitField0_ & 0x00000040) == 0x00000040)) {
10330         size += com.google.protobuf.CodedOutputStream
10331           .computeUInt64Size(9, nonce_);
10332       }
10333       size += getUnknownFields().getSerializedSize();
10334       memoizedSerializedSize = size;
10335       return size;
10336     }
10337 
10338     private static final long serialVersionUID = 0L;
10339     @java.lang.Override
10340     protected java.lang.Object writeReplace()
10341         throws java.io.ObjectStreamException {
10342       return super.writeReplace();
10343     }
10344 
10345     @java.lang.Override
10346     public boolean equals(final java.lang.Object obj) {
10347       if (obj == this) {
10348         return true;
10349       }
10350       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)) {
10351         return super.equals(obj);
10352       }
10353       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) obj;
10354 
10355       boolean result = true;
10356       result = result && (hasRow() == other.hasRow());
10357       if (hasRow()) {
10358         result = result && getRow()
10359             .equals(other.getRow());
10360       }
10361       result = result && (hasMutateType() == other.hasMutateType());
10362       if (hasMutateType()) {
10363         result = result &&
10364             (getMutateType() == other.getMutateType());
10365       }
10366       result = result && getColumnValueList()
10367           .equals(other.getColumnValueList());
10368       result = result && (hasTimestamp() == other.hasTimestamp());
10369       if (hasTimestamp()) {
10370         result = result && (getTimestamp()
10371             == other.getTimestamp());
10372       }
10373       result = result && getAttributeList()
10374           .equals(other.getAttributeList());
10375       result = result && (hasDurability() == other.hasDurability());
10376       if (hasDurability()) {
10377         result = result &&
10378             (getDurability() == other.getDurability());
10379       }
10380       result = result && (hasTimeRange() == other.hasTimeRange());
10381       if (hasTimeRange()) {
10382         result = result && getTimeRange()
10383             .equals(other.getTimeRange());
10384       }
10385       result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
10386       if (hasAssociatedCellCount()) {
10387         result = result && (getAssociatedCellCount()
10388             == other.getAssociatedCellCount());
10389       }
10390       result = result && (hasNonce() == other.hasNonce());
10391       if (hasNonce()) {
10392         result = result && (getNonce()
10393             == other.getNonce());
10394       }
10395       result = result &&
10396           getUnknownFields().equals(other.getUnknownFields());
10397       return result;
10398     }
10399 
10400     private int memoizedHashCode = 0;
10401     @java.lang.Override
10402     public int hashCode() {
10403       if (memoizedHashCode != 0) {
10404         return memoizedHashCode;
10405       }
10406       int hash = 41;
10407       hash = (19 * hash) + getDescriptorForType().hashCode();
10408       if (hasRow()) {
10409         hash = (37 * hash) + ROW_FIELD_NUMBER;
10410         hash = (53 * hash) + getRow().hashCode();
10411       }
10412       if (hasMutateType()) {
10413         hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER;
10414         hash = (53 * hash) + hashEnum(getMutateType());
10415       }
10416       if (getColumnValueCount() > 0) {
10417         hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER;
10418         hash = (53 * hash) + getColumnValueList().hashCode();
10419       }
10420       if (hasTimestamp()) {
10421         hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
10422         hash = (53 * hash) + hashLong(getTimestamp());
10423       }
10424       if (getAttributeCount() > 0) {
10425         hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
10426         hash = (53 * hash) + getAttributeList().hashCode();
10427       }
10428       if (hasDurability()) {
10429         hash = (37 * hash) + DURABILITY_FIELD_NUMBER;
10430         hash = (53 * hash) + hashEnum(getDurability());
10431       }
10432       if (hasTimeRange()) {
10433         hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
10434         hash = (53 * hash) + getTimeRange().hashCode();
10435       }
10436       if (hasAssociatedCellCount()) {
10437         hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
10438         hash = (53 * hash) + getAssociatedCellCount();
10439       }
10440       if (hasNonce()) {
10441         hash = (37 * hash) + NONCE_FIELD_NUMBER;
10442         hash = (53 * hash) + hashLong(getNonce());
10443       }
10444       hash = (29 * hash) + getUnknownFields().hashCode();
10445       memoizedHashCode = hash;
10446       return hash;
10447     }
10448 
10449     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10450         com.google.protobuf.ByteString data)
10451         throws com.google.protobuf.InvalidProtocolBufferException {
10452       return PARSER.parseFrom(data);
10453     }
10454     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10455         com.google.protobuf.ByteString data,
10456         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10457         throws com.google.protobuf.InvalidProtocolBufferException {
10458       return PARSER.parseFrom(data, extensionRegistry);
10459     }
10460     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data)
10461         throws com.google.protobuf.InvalidProtocolBufferException {
10462       return PARSER.parseFrom(data);
10463     }
10464     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10465         byte[] data,
10466         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10467         throws com.google.protobuf.InvalidProtocolBufferException {
10468       return PARSER.parseFrom(data, extensionRegistry);
10469     }
10470     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input)
10471         throws java.io.IOException {
10472       return PARSER.parseFrom(input);
10473     }
10474     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10475         java.io.InputStream input,
10476         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10477         throws java.io.IOException {
10478       return PARSER.parseFrom(input, extensionRegistry);
10479     }
10480     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input)
10481         throws java.io.IOException {
10482       return PARSER.parseDelimitedFrom(input);
10483     }
10484     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(
10485         java.io.InputStream input,
10486         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10487         throws java.io.IOException {
10488       return PARSER.parseDelimitedFrom(input, extensionRegistry);
10489     }
10490     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10491         com.google.protobuf.CodedInputStream input)
10492         throws java.io.IOException {
10493       return PARSER.parseFrom(input);
10494     }
10495     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10496         com.google.protobuf.CodedInputStream input,
10497         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10498         throws java.io.IOException {
10499       return PARSER.parseFrom(input, extensionRegistry);
10500     }
10501 
10502     public static Builder newBuilder() { return Builder.create(); }
10503     public Builder newBuilderForType() { return newBuilder(); }
10504     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype) {
10505       return newBuilder().mergeFrom(prototype);
10506     }
10507     public Builder toBuilder() { return newBuilder(this); }
10508 
10509     @java.lang.Override
10510     protected Builder newBuilderForType(
10511         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10512       Builder builder = new Builder(parent);
10513       return builder;
10514     }
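
    // Editorial example (not generated by protoc): a minimal sketch of assembling
    // the put-style mutation described by the Builder documented below.  The row,
    // family, qualifier and value literals are hypothetical; setMutateType and
    // addColumnValue are assumed to be the protoc-generated setters for the
    // mutate_type and column_value fields declared above.
    private static MutationProto examplePut(byte[] row) {
      return MutationProto.newBuilder()
          .setRow(com.google.protobuf.ByteString.copyFrom(row))
          .setMutateType(MutationType.PUT)
          .addColumnValue(ColumnValue.newBuilder()
              .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
              .addQualifierValue(ColumnValue.QualifierValue.newBuilder()
                  .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
                  .setValue(com.google.protobuf.ByteString.copyFromUtf8("v"))))
          .build();
    }
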
10515     /**
10516      * Protobuf type {@code hbase.pb.MutationProto}
10517      *
10518      * <pre>
10519      **
10520      * A specific mutation inside a mutate request.
10521      * It can be an append, increment, put or delete based
10522      * on the mutation type.  It can be fully filled in, or carry
10523      * only metadata when the data is being passed alongside
10524      * outside of pb.
10525      * </pre>
10526      */
10527     public static final class Builder extends
10528         com.google.protobuf.GeneratedMessage.Builder<Builder>
10529        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder {
10530       public static final com.google.protobuf.Descriptors.Descriptor
10531           getDescriptor() {
10532         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor;
10533       }
10534 
10535       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10536           internalGetFieldAccessorTable() {
10537         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable
10538             .ensureFieldAccessorsInitialized(
10539                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
10540       }
10541 
10542       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder()
10543       private Builder() {
10544         maybeForceBuilderInitialization();
10545       }
10546 
10547       private Builder(
10548           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10549         super(parent);
10550         maybeForceBuilderInitialization();
10551       }
10552       private void maybeForceBuilderInitialization() {
10553         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10554           getColumnValueFieldBuilder();
10555           getAttributeFieldBuilder();
10556           getTimeRangeFieldBuilder();
10557         }
10558       }
10559       private static Builder create() {
10560         return new Builder();
10561       }
10562 
10563       public Builder clear() {
10564         super.clear();
10565         row_ = com.google.protobuf.ByteString.EMPTY;
10566         bitField0_ = (bitField0_ & ~0x00000001);
10567         mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
10568         bitField0_ = (bitField0_ & ~0x00000002);
10569         if (columnValueBuilder_ == null) {
10570           columnValue_ = java.util.Collections.emptyList();
10571           bitField0_ = (bitField0_ & ~0x00000004);
10572         } else {
10573           columnValueBuilder_.clear();
10574         }
10575         timestamp_ = 0L;
10576         bitField0_ = (bitField0_ & ~0x00000008);
10577         if (attributeBuilder_ == null) {
10578           attribute_ = java.util.Collections.emptyList();
10579           bitField0_ = (bitField0_ & ~0x00000010);
10580         } else {
10581           attributeBuilder_.clear();
10582         }
10583         durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
10584         bitField0_ = (bitField0_ & ~0x00000020);
10585         if (timeRangeBuilder_ == null) {
10586           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
10587         } else {
10588           timeRangeBuilder_.clear();
10589         }
10590         bitField0_ = (bitField0_ & ~0x00000040);
10591         associatedCellCount_ = 0;
10592         bitField0_ = (bitField0_ & ~0x00000080);
10593         nonce_ = 0L;
10594         bitField0_ = (bitField0_ & ~0x00000100);
10595         return this;
10596       }
10597 
10598       public Builder clone() {
10599         return create().mergeFrom(buildPartial());
10600       }
10601 
10602       public com.google.protobuf.Descriptors.Descriptor
10603           getDescriptorForType() {
10604         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor;
10605       }
10606 
10607       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() {
10608         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
10609       }
10610 
10611       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto build() {
10612         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial();
10613         if (!result.isInitialized()) {
10614           throw newUninitializedMessageException(result);
10615         }
10616         return result;
10617       }
10618 
10619       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildPartial() {
10620         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto(this);
10621         int from_bitField0_ = bitField0_;
10622         int to_bitField0_ = 0;
10623         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10624           to_bitField0_ |= 0x00000001;
10625         }
10626         result.row_ = row_;
10627         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
10628           to_bitField0_ |= 0x00000002;
10629         }
10630         result.mutateType_ = mutateType_;
10631         if (columnValueBuilder_ == null) {
10632           if (((bitField0_ & 0x00000004) == 0x00000004)) {
10633             columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
10634             bitField0_ = (bitField0_ & ~0x00000004);
10635           }
10636           result.columnValue_ = columnValue_;
10637         } else {
10638           result.columnValue_ = columnValueBuilder_.build();
10639         }
10640         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
10641           to_bitField0_ |= 0x00000004;
10642         }
10643         result.timestamp_ = timestamp_;
10644         if (attributeBuilder_ == null) {
10645           if (((bitField0_ & 0x00000010) == 0x00000010)) {
10646             attribute_ = java.util.Collections.unmodifiableList(attribute_);
10647             bitField0_ = (bitField0_ & ~0x00000010);
10648           }
10649           result.attribute_ = attribute_;
10650         } else {
10651           result.attribute_ = attributeBuilder_.build();
10652         }
10653         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
10654           to_bitField0_ |= 0x00000008;
10655         }
10656         result.durability_ = durability_;
10657         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
10658           to_bitField0_ |= 0x00000010;
10659         }
10660         if (timeRangeBuilder_ == null) {
10661           result.timeRange_ = timeRange_;
10662         } else {
10663           result.timeRange_ = timeRangeBuilder_.build();
10664         }
10665         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
10666           to_bitField0_ |= 0x00000020;
10667         }
10668         result.associatedCellCount_ = associatedCellCount_;
10669         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
10670           to_bitField0_ |= 0x00000040;
10671         }
10672         result.nonce_ = nonce_;
10673         result.bitField0_ = to_bitField0_;
10674         onBuilt();
10675         return result;
10676       }
10677 
10678       public Builder mergeFrom(com.google.protobuf.Message other) {
10679         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) {
10680           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)other);
10681         } else {
10682           super.mergeFrom(other);
10683           return this;
10684         }
10685       }
10686 
10687       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other) {
10688         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this;
10689         if (other.hasRow()) {
10690           setRow(other.getRow());
10691         }
10692         if (other.hasMutateType()) {
10693           setMutateType(other.getMutateType());
10694         }
10695         if (columnValueBuilder_ == null) {
10696           if (!other.columnValue_.isEmpty()) {
10697             if (columnValue_.isEmpty()) {
10698               columnValue_ = other.columnValue_;
10699               bitField0_ = (bitField0_ & ~0x00000004);
10700             } else {
10701               ensureColumnValueIsMutable();
10702               columnValue_.addAll(other.columnValue_);
10703             }
10704             onChanged();
10705           }
10706         } else {
10707           if (!other.columnValue_.isEmpty()) {
10708             if (columnValueBuilder_.isEmpty()) {
10709               columnValueBuilder_.dispose();
10710               columnValueBuilder_ = null;
10711               columnValue_ = other.columnValue_;
10712               bitField0_ = (bitField0_ & ~0x00000004);
10713               columnValueBuilder_ = 
10714                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
10715                    getColumnValueFieldBuilder() : null;
10716             } else {
10717               columnValueBuilder_.addAllMessages(other.columnValue_);
10718             }
10719           }
10720         }
10721         if (other.hasTimestamp()) {
10722           setTimestamp(other.getTimestamp());
10723         }
10724         if (attributeBuilder_ == null) {
10725           if (!other.attribute_.isEmpty()) {
10726             if (attribute_.isEmpty()) {
10727               attribute_ = other.attribute_;
10728               bitField0_ = (bitField0_ & ~0x00000010);
10729             } else {
10730               ensureAttributeIsMutable();
10731               attribute_.addAll(other.attribute_);
10732             }
10733             onChanged();
10734           }
10735         } else {
10736           if (!other.attribute_.isEmpty()) {
10737             if (attributeBuilder_.isEmpty()) {
10738               attributeBuilder_.dispose();
10739               attributeBuilder_ = null;
10740               attribute_ = other.attribute_;
10741               bitField0_ = (bitField0_ & ~0x00000010);
10742               attributeBuilder_ = 
10743                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
10744                    getAttributeFieldBuilder() : null;
10745             } else {
10746               attributeBuilder_.addAllMessages(other.attribute_);
10747             }
10748           }
10749         }
10750         if (other.hasDurability()) {
10751           setDurability(other.getDurability());
10752         }
10753         if (other.hasTimeRange()) {
10754           mergeTimeRange(other.getTimeRange());
10755         }
10756         if (other.hasAssociatedCellCount()) {
10757           setAssociatedCellCount(other.getAssociatedCellCount());
10758         }
10759         if (other.hasNonce()) {
10760           setNonce(other.getNonce());
10761         }
10762         this.mergeUnknownFields(other.getUnknownFields());
10763         return this;
10764       }
10765 
10766       public final boolean isInitialized() {
10767         for (int i = 0; i < getColumnValueCount(); i++) {
10768           if (!getColumnValue(i).isInitialized()) {
10769             
10770             return false;
10771           }
10772         }
10773         for (int i = 0; i < getAttributeCount(); i++) {
10774           if (!getAttribute(i).isInitialized()) {
10775             
10776             return false;
10777           }
10778         }
10779         return true;
10780       }
10781 
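            /*
             * Illustrative note (not part of the generated source): the stream
             * overload of mergeFrom below parses with PARSER.parsePartialFrom
             * and, if parsing fails part-way, still merges the partially
             * decoded message recovered via e.getUnfinishedMessage() into this
             * builder before rethrowing, so every field that was successfully
             * read is preserved.
             */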
10782       public Builder mergeFrom(
10783           com.google.protobuf.CodedInputStream input,
10784           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10785           throws java.io.IOException {
10786         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parsedMessage = null;
10787         try {
10788           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10789         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10790           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage();
10791           throw e;
10792         } finally {
10793           if (parsedMessage != null) {
10794             mergeFrom(parsedMessage);
10795           }
10796         }
10797         return this;
10798       }
10799       private int bitField0_;
10800 
10801       // optional bytes row = 1;
10802       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
10803       /**
10804        * <code>optional bytes row = 1;</code>
10805        */
10806       public boolean hasRow() {
10807         return ((bitField0_ & 0x00000001) == 0x00000001);
10808       }
10809       /**
10810        * <code>optional bytes row = 1;</code>
10811        */
10812       public com.google.protobuf.ByteString getRow() {
10813         return row_;
10814       }
10815       /**
10816        * <code>optional bytes row = 1;</code>
10817        */
10818       public Builder setRow(com.google.protobuf.ByteString value) {
10819         if (value == null) {
10820           throw new NullPointerException();
10821         }
10822         bitField0_ |= 0x00000001;
10823         row_ = value;
10824         onChanged();
10825         return this;
10826       }
10827       /**
10828        * <code>optional bytes row = 1;</code>
10829        */
10830       public Builder clearRow() {
10831         bitField0_ = (bitField0_ & ~0x00000001);
10832         row_ = getDefaultInstance().getRow();
10833         onChanged();
10834         return this;
10835       }
10836 
10837       // optional .hbase.pb.MutationProto.MutationType mutate_type = 2;
10838       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
10839       /**
10840        * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
10841        */
10842       public boolean hasMutateType() {
10843         return ((bitField0_ & 0x00000002) == 0x00000002);
10844       }
10845       /**
10846        * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
10847        */
10848       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
10849         return mutateType_;
10850       }
10851       /**
10852        * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
10853        */
10854       public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value) {
10855         if (value == null) {
10856           throw new NullPointerException();
10857         }
10858         bitField0_ |= 0x00000002;
10859         mutateType_ = value;
10860         onChanged();
10861         return this;
10862       }
10863       /**
10864        * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
10865        */
10866       public Builder clearMutateType() {
10867         bitField0_ = (bitField0_ & ~0x00000002);
10868         mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
10869         onChanged();
10870         return this;
10871       }
10872 
10873       // repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;
10874       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_ =
10875         java.util.Collections.emptyList();
10876       private void ensureColumnValueIsMutable() {
10877         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
10878           columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(columnValue_);
10879           bitField0_ |= 0x00000004;
10880          }
10881       }
10882 
10883       private com.google.protobuf.RepeatedFieldBuilder<
10884           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_;
10885 
10886       /**
10887        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10888        */
10889       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
10890         if (columnValueBuilder_ == null) {
10891           return java.util.Collections.unmodifiableList(columnValue_);
10892         } else {
10893           return columnValueBuilder_.getMessageList();
10894         }
10895       }
10896       /**
10897        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10898        */
10899       public int getColumnValueCount() {
10900         if (columnValueBuilder_ == null) {
10901           return columnValue_.size();
10902         } else {
10903           return columnValueBuilder_.getCount();
10904         }
10905       }
10906       /**
10907        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10908        */
10909       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
10910         if (columnValueBuilder_ == null) {
10911           return columnValue_.get(index);
10912         } else {
10913           return columnValueBuilder_.getMessage(index);
10914         }
10915       }
10916       /**
10917        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10918        */
10919       public Builder setColumnValue(
10920           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
10921         if (columnValueBuilder_ == null) {
10922           if (value == null) {
10923             throw new NullPointerException();
10924           }
10925           ensureColumnValueIsMutable();
10926           columnValue_.set(index, value);
10927           onChanged();
10928         } else {
10929           columnValueBuilder_.setMessage(index, value);
10930         }
10931         return this;
10932       }
10933       /**
10934        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10935        */
10936       public Builder setColumnValue(
10937           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
10938         if (columnValueBuilder_ == null) {
10939           ensureColumnValueIsMutable();
10940           columnValue_.set(index, builderForValue.build());
10941           onChanged();
10942         } else {
10943           columnValueBuilder_.setMessage(index, builderForValue.build());
10944         }
10945         return this;
10946       }
10947       /**
10948        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10949        */
10950       public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
10951         if (columnValueBuilder_ == null) {
10952           if (value == null) {
10953             throw new NullPointerException();
10954           }
10955           ensureColumnValueIsMutable();
10956           columnValue_.add(value);
10957           onChanged();
10958         } else {
10959           columnValueBuilder_.addMessage(value);
10960         }
10961         return this;
10962       }
10963       /**
10964        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10965        */
10966       public Builder addColumnValue(
10967           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
10968         if (columnValueBuilder_ == null) {
10969           if (value == null) {
10970             throw new NullPointerException();
10971           }
10972           ensureColumnValueIsMutable();
10973           columnValue_.add(index, value);
10974           onChanged();
10975         } else {
10976           columnValueBuilder_.addMessage(index, value);
10977         }
10978         return this;
10979       }
10980       /**
10981        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10982        */
10983       public Builder addColumnValue(
10984           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
10985         if (columnValueBuilder_ == null) {
10986           ensureColumnValueIsMutable();
10987           columnValue_.add(builderForValue.build());
10988           onChanged();
10989         } else {
10990           columnValueBuilder_.addMessage(builderForValue.build());
10991         }
10992         return this;
10993       }
10994       /**
10995        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
10996        */
10997       public Builder addColumnValue(
10998           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
10999         if (columnValueBuilder_ == null) {
11000           ensureColumnValueIsMutable();
11001           columnValue_.add(index, builderForValue.build());
11002           onChanged();
11003         } else {
11004           columnValueBuilder_.addMessage(index, builderForValue.build());
11005         }
11006         return this;
11007       }
11008       /**
11009        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11010        */
11011       public Builder addAllColumnValue(
11012           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values) {
11013         if (columnValueBuilder_ == null) {
11014           ensureColumnValueIsMutable();
11015           super.addAll(values, columnValue_);
11016           onChanged();
11017         } else {
11018           columnValueBuilder_.addAllMessages(values);
11019         }
11020         return this;
11021       }
11022       /**
11023        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11024        */
11025       public Builder clearColumnValue() {
11026         if (columnValueBuilder_ == null) {
11027           columnValue_ = java.util.Collections.emptyList();
11028           bitField0_ = (bitField0_ & ~0x00000004);
11029           onChanged();
11030         } else {
11031           columnValueBuilder_.clear();
11032         }
11033         return this;
11034       }
11035       /**
11036        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11037        */
11038       public Builder removeColumnValue(int index) {
11039         if (columnValueBuilder_ == null) {
11040           ensureColumnValueIsMutable();
11041           columnValue_.remove(index);
11042           onChanged();
11043         } else {
11044           columnValueBuilder_.remove(index);
11045         }
11046         return this;
11047       }
11048       /**
11049        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11050        */
11051       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder(
11052           int index) {
11053         return getColumnValueFieldBuilder().getBuilder(index);
11054       }
11055       /**
11056        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11057        */
11058       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
11059           int index) {
11060         if (columnValueBuilder_ == null) {
11061           return columnValue_.get(index);  } else {
11062           return columnValueBuilder_.getMessageOrBuilder(index);
11063         }
11064       }
11065       /**
11066        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11067        */
11068       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
11069            getColumnValueOrBuilderList() {
11070         if (columnValueBuilder_ != null) {
11071           return columnValueBuilder_.getMessageOrBuilderList();
11072         } else {
11073           return java.util.Collections.unmodifiableList(columnValue_);
11074         }
11075       }
11076       /**
11077        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11078        */
11079       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() {
11080         return getColumnValueFieldBuilder().addBuilder(
11081             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
11082       }
11083       /**
11084        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11085        */
11086       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder(
11087           int index) {
11088         return getColumnValueFieldBuilder().addBuilder(
11089             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
11090       }
11091       /**
11092        * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
11093        */
11094       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder> 
11095            getColumnValueBuilderList() {
11096         return getColumnValueFieldBuilder().getBuilderList();
11097       }
11098       private com.google.protobuf.RepeatedFieldBuilder<
11099           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
11100           getColumnValueFieldBuilder() {
11101         if (columnValueBuilder_ == null) {
11102           columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
11103               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>(
11104                   columnValue_,
11105                   ((bitField0_ & 0x00000004) == 0x00000004),
11106                   getParentForChildren(),
11107                   isClean());
11108           columnValue_ = null;
11109         }
11110         return columnValueBuilder_;
11111       }
11112 
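            /*
             * Usage sketch (illustrative, not generated code): the repeated
             * column_value field can be filled either with pre-built
             * ColumnValue messages via addColumnValue(...), or edited in place
             * via addColumnValueBuilder(), which returns a nested builder
             * parented to this MutationProto.Builder. setFamily on
             * ColumnValue.Builder is assumed from the ColumnValue message
             * generated earlier in this file.
             *
             *   MutationProto.Builder mutation = MutationProto.newBuilder();
             *   mutation.addColumnValueBuilder()
             *       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"));
             *   // or: mutation.addColumnValue(prebuiltColumnValue);
             */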
11113       // optional uint64 timestamp = 4;
11114       private long timestamp_ ;
11115       /**
11116        * <code>optional uint64 timestamp = 4;</code>
11117        */
11118       public boolean hasTimestamp() {
11119         return ((bitField0_ & 0x00000008) == 0x00000008);
11120       }
11121       /**
11122        * <code>optional uint64 timestamp = 4;</code>
11123        */
11124       public long getTimestamp() {
11125         return timestamp_;
11126       }
11127       /**
11128        * <code>optional uint64 timestamp = 4;</code>
11129        */
11130       public Builder setTimestamp(long value) {
11131         bitField0_ |= 0x00000008;
11132         timestamp_ = value;
11133         onChanged();
11134         return this;
11135       }
11136       /**
11137        * <code>optional uint64 timestamp = 4;</code>
11138        */
11139       public Builder clearTimestamp() {
11140         bitField0_ = (bitField0_ & ~0x00000008);
11141         timestamp_ = 0L;
11142         onChanged();
11143         return this;
11144       }
11145 
11146       // repeated .hbase.pb.NameBytesPair attribute = 5;
11147       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
11148         java.util.Collections.emptyList();
11149       private void ensureAttributeIsMutable() {
11150         if (!((bitField0_ & 0x00000010) == 0x00000010)) {
11151           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
11152           bitField0_ |= 0x00000010;
11153          }
11154       }
11155 
11156       private com.google.protobuf.RepeatedFieldBuilder<
11157           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
11158 
11159       /**
11160        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11161        */
11162       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
11163         if (attributeBuilder_ == null) {
11164           return java.util.Collections.unmodifiableList(attribute_);
11165         } else {
11166           return attributeBuilder_.getMessageList();
11167         }
11168       }
11169       /**
11170        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11171        */
11172       public int getAttributeCount() {
11173         if (attributeBuilder_ == null) {
11174           return attribute_.size();
11175         } else {
11176           return attributeBuilder_.getCount();
11177         }
11178       }
11179       /**
11180        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11181        */
11182       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
11183         if (attributeBuilder_ == null) {
11184           return attribute_.get(index);
11185         } else {
11186           return attributeBuilder_.getMessage(index);
11187         }
11188       }
11189       /**
11190        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11191        */
11192       public Builder setAttribute(
11193           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11194         if (attributeBuilder_ == null) {
11195           if (value == null) {
11196             throw new NullPointerException();
11197           }
11198           ensureAttributeIsMutable();
11199           attribute_.set(index, value);
11200           onChanged();
11201         } else {
11202           attributeBuilder_.setMessage(index, value);
11203         }
11204         return this;
11205       }
11206       /**
11207        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11208        */
11209       public Builder setAttribute(
11210           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11211         if (attributeBuilder_ == null) {
11212           ensureAttributeIsMutable();
11213           attribute_.set(index, builderForValue.build());
11214           onChanged();
11215         } else {
11216           attributeBuilder_.setMessage(index, builderForValue.build());
11217         }
11218         return this;
11219       }
11220       /**
11221        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11222        */
11223       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11224         if (attributeBuilder_ == null) {
11225           if (value == null) {
11226             throw new NullPointerException();
11227           }
11228           ensureAttributeIsMutable();
11229           attribute_.add(value);
11230           onChanged();
11231         } else {
11232           attributeBuilder_.addMessage(value);
11233         }
11234         return this;
11235       }
11236       /**
11237        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11238        */
11239       public Builder addAttribute(
11240           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11241         if (attributeBuilder_ == null) {
11242           if (value == null) {
11243             throw new NullPointerException();
11244           }
11245           ensureAttributeIsMutable();
11246           attribute_.add(index, value);
11247           onChanged();
11248         } else {
11249           attributeBuilder_.addMessage(index, value);
11250         }
11251         return this;
11252       }
11253       /**
11254        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11255        */
11256       public Builder addAttribute(
11257           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11258         if (attributeBuilder_ == null) {
11259           ensureAttributeIsMutable();
11260           attribute_.add(builderForValue.build());
11261           onChanged();
11262         } else {
11263           attributeBuilder_.addMessage(builderForValue.build());
11264         }
11265         return this;
11266       }
11267       /**
11268        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11269        */
11270       public Builder addAttribute(
11271           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11272         if (attributeBuilder_ == null) {
11273           ensureAttributeIsMutable();
11274           attribute_.add(index, builderForValue.build());
11275           onChanged();
11276         } else {
11277           attributeBuilder_.addMessage(index, builderForValue.build());
11278         }
11279         return this;
11280       }
11281       /**
11282        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11283        */
11284       public Builder addAllAttribute(
11285           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
11286         if (attributeBuilder_ == null) {
11287           ensureAttributeIsMutable();
11288           super.addAll(values, attribute_);
11289           onChanged();
11290         } else {
11291           attributeBuilder_.addAllMessages(values);
11292         }
11293         return this;
11294       }
11295       /**
11296        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11297        */
11298       public Builder clearAttribute() {
11299         if (attributeBuilder_ == null) {
11300           attribute_ = java.util.Collections.emptyList();
11301           bitField0_ = (bitField0_ & ~0x00000010);
11302           onChanged();
11303         } else {
11304           attributeBuilder_.clear();
11305         }
11306         return this;
11307       }
11308       /**
11309        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11310        */
11311       public Builder removeAttribute(int index) {
11312         if (attributeBuilder_ == null) {
11313           ensureAttributeIsMutable();
11314           attribute_.remove(index);
11315           onChanged();
11316         } else {
11317           attributeBuilder_.remove(index);
11318         }
11319         return this;
11320       }
11321       /**
11322        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11323        */
11324       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
11325           int index) {
11326         return getAttributeFieldBuilder().getBuilder(index);
11327       }
11328       /**
11329        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11330        */
11331       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
11332           int index) {
11333         if (attributeBuilder_ == null) {
11334           return attribute_.get(index);  } else {
11335           return attributeBuilder_.getMessageOrBuilder(index);
11336         }
11337       }
11338       /**
11339        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11340        */
11341       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
11342            getAttributeOrBuilderList() {
11343         if (attributeBuilder_ != null) {
11344           return attributeBuilder_.getMessageOrBuilderList();
11345         } else {
11346           return java.util.Collections.unmodifiableList(attribute_);
11347         }
11348       }
11349       /**
11350        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11351        */
11352       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
11353         return getAttributeFieldBuilder().addBuilder(
11354             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
11355       }
11356       /**
11357        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11358        */
11359       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
11360           int index) {
11361         return getAttributeFieldBuilder().addBuilder(
11362             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
11363       }
11364       /**
11365        * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
11366        */
11367       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> 
11368            getAttributeBuilderList() {
11369         return getAttributeFieldBuilder().getBuilderList();
11370       }
11371       private com.google.protobuf.RepeatedFieldBuilder<
11372           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
11373           getAttributeFieldBuilder() {
11374         if (attributeBuilder_ == null) {
11375           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
11376               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
11377                   attribute_,
11378                   ((bitField0_ & 0x00000010) == 0x00000010),
11379                   getParentForChildren(),
11380                   isClean());
11381           attribute_ = null;
11382         }
11383         return attributeBuilder_;
11384       }
11385 
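            /*
             * Usage sketch (illustrative, not generated code): attributes are
             * free-form name/value pairs carried with the mutation. Assuming
             * the setName/setValue setters of HBaseProtos.NameBytesPair:
             *
             *   mutation.addAttributeBuilder()
             *       .setName("myAttribute")
             *       .setValue(com.google.protobuf.ByteString.copyFromUtf8("42"));
             */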
11386       // optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];
11387       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
11388       /**
11389        * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
11390        */
11391       public boolean hasDurability() {
11392         return ((bitField0_ & 0x00000020) == 0x00000020);
11393       }
11394       /**
11395        * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
11396        */
11397       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
11398         return durability_;
11399       }
11400       /**
11401        * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
11402        */
11403       public Builder setDurability(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value) {
11404         if (value == null) {
11405           throw new NullPointerException();
11406         }
11407         bitField0_ |= 0x00000020;
11408         durability_ = value;
11409         onChanged();
11410         return this;
11411       }
11412       /**
11413        * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
11414        */
11415       public Builder clearDurability() {
11416         bitField0_ = (bitField0_ & ~0x00000020);
11417         durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
11418         onChanged();
11419         return this;
11420       }
11421 
11422       // optional .hbase.pb.TimeRange time_range = 7;
11423       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
11424       private com.google.protobuf.SingleFieldBuilder<
11425           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
11426       /**
11427        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11428        *
11429        * <pre>
11430        * For some mutations, a result may be returned, in which case,
11431        * time range can be specified for potential performance gain
11432        * </pre>
11433        */
11434       public boolean hasTimeRange() {
11435         return ((bitField0_ & 0x00000040) == 0x00000040);
11436       }
11437       /**
11438        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11439        *
11440        * <pre>
11441        * For some mutations, a result may be returned, in which case,
11442        * time range can be specified for potential performance gain
11443        * </pre>
11444        */
11445       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
11446         if (timeRangeBuilder_ == null) {
11447           return timeRange_;
11448         } else {
11449           return timeRangeBuilder_.getMessage();
11450         }
11451       }
11452       /**
11453        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11454        *
11455        * <pre>
11456        * For some mutations, a result may be returned, in which case,
11457        * time range can be specified for potential performance gain
11458        * </pre>
11459        */
11460       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
11461         if (timeRangeBuilder_ == null) {
11462           if (value == null) {
11463             throw new NullPointerException();
11464           }
11465           timeRange_ = value;
11466           onChanged();
11467         } else {
11468           timeRangeBuilder_.setMessage(value);
11469         }
11470         bitField0_ |= 0x00000040;
11471         return this;
11472       }
11473       /**
11474        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11475        *
11476        * <pre>
11477        * For some mutations, a result may be returned, in which case,
11478        * time range can be specified for potential performance gain
11479        * </pre>
11480        */
11481       public Builder setTimeRange(
11482           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
11483         if (timeRangeBuilder_ == null) {
11484           timeRange_ = builderForValue.build();
11485           onChanged();
11486         } else {
11487           timeRangeBuilder_.setMessage(builderForValue.build());
11488         }
11489         bitField0_ |= 0x00000040;
11490         return this;
11491       }
11492       /**
11493        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11494        *
11495        * <pre>
11496        * For some mutations, a result may be returned, in which case,
11497        * time range can be specified for potential performance gain
11498        * </pre>
11499        */
11500       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
11501         if (timeRangeBuilder_ == null) {
11502           if (((bitField0_ & 0x00000040) == 0x00000040) &&
11503               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
11504             timeRange_ =
11505               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
11506           } else {
11507             timeRange_ = value;
11508           }
11509           onChanged();
11510         } else {
11511           timeRangeBuilder_.mergeFrom(value);
11512         }
11513         bitField0_ |= 0x00000040;
11514         return this;
11515       }
11516       /**
11517        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11518        *
11519        * <pre>
11520        * For some mutations, a result may be returned, in which case,
11521        * time range can be specified for potential performance gain
11522        * </pre>
11523        */
11524       public Builder clearTimeRange() {
11525         if (timeRangeBuilder_ == null) {
11526           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
11527           onChanged();
11528         } else {
11529           timeRangeBuilder_.clear();
11530         }
11531         bitField0_ = (bitField0_ & ~0x00000040);
11532         return this;
11533       }
11534       /**
11535        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11536        *
11537        * <pre>
11538        * For some mutations, a result may be returned, in which case,
11539        * time range can be specified for potential performance gain
11540        * </pre>
11541        */
11542       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
11543         bitField0_ |= 0x00000040;
11544         onChanged();
11545         return getTimeRangeFieldBuilder().getBuilder();
11546       }
11547       /**
11548        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11549        *
11550        * <pre>
11551        * For some mutations, a result may be returned, in which case,
11552        * time range can be specified for potential performance gain
11553        * </pre>
11554        */
11555       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
11556         if (timeRangeBuilder_ != null) {
11557           return timeRangeBuilder_.getMessageOrBuilder();
11558         } else {
11559           return timeRange_;
11560         }
11561       }
11562       /**
11563        * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
11564        *
11565        * <pre>
11566        * For some mutations, a result may be returned, in which case,
11567        * time range can be specified for potential performance gain
11568        * </pre>
11569        */
11570       private com.google.protobuf.SingleFieldBuilder<
11571           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> 
11572           getTimeRangeFieldBuilder() {
11573         if (timeRangeBuilder_ == null) {
11574           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
11575               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
11576                   timeRange_,
11577                   getParentForChildren(),
11578                   isClean());
11579           timeRange_ = null;
11580         }
11581         return timeRangeBuilder_;
11582       }
11583 
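            /*
             * Usage sketch (illustrative, not generated code): per the field
             * comment above, a time range only matters for mutations whose
             * result is read back. getTimeRangeBuilder() marks the field as
             * present (bit 0x00000040) and edits the nested message in place;
             * setFrom/setTo are assumed from HBaseProtos.TimeRange.
             *
             *   mutation.getTimeRangeBuilder().setFrom(0L).setTo(upperBoundExclusive);
             *
             *   // upperBoundExclusive is a placeholder timestamp supplied by
             *   // the caller.
             */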
11584       // optional int32 associated_cell_count = 8;
11585       private int associatedCellCount_ ;
11586       /**
11587        * <code>optional int32 associated_cell_count = 8;</code>
11588        *
11589        * <pre>
11590        * The below count is set when the associated cells are NOT
11591        * part of this protobuf message; they are passed alongside
11592        * and then this Message is a placeholder with metadata.  The
11593        * count is needed to know how many to peel off the block of Cells as
11594        * ours.  NOTE: This is different from the pb managed cell_count of the
11595        * 'cell' field above which is non-null when the cells are pb'd.
11596        * </pre>
11597        */
11598       public boolean hasAssociatedCellCount() {
11599         return ((bitField0_ & 0x00000080) == 0x00000080);
11600       }
11601       /**
11602        * <code>optional int32 associated_cell_count = 8;</code>
11603        *
11604        * <pre>
11605        * The below count is set when the associated cells are NOT
11606        * part of this protobuf message; they are passed alongside
11607        * and then this Message is a placeholder with metadata.  The
11608        * count is needed to know how many to peel off the block of Cells as
11609        * ours.  NOTE: This is different from the pb managed cell_count of the
11610        * 'cell' field above which is non-null when the cells are pb'd.
11611        * </pre>
11612        */
11613       public int getAssociatedCellCount() {
11614         return associatedCellCount_;
11615       }
11616       /**
11617        * <code>optional int32 associated_cell_count = 8;</code>
11618        *
11619        * <pre>
11620        * The below count is set when the associated cells are NOT
11621        * part of this protobuf message; they are passed alongside
11622        * and then this Message is a placeholder with metadata.  The
11623        * count is needed to know how many to peel off the block of Cells as
11624        * ours.  NOTE: This is different from the pb managed cell_count of the
11625        * 'cell' field above which is non-null when the cells are pb'd.
11626        * </pre>
11627        */
11628       public Builder setAssociatedCellCount(int value) {
11629         bitField0_ |= 0x00000080;
11630         associatedCellCount_ = value;
11631         onChanged();
11632         return this;
11633       }
11634       /**
11635        * <code>optional int32 associated_cell_count = 8;</code>
11636        *
11637        * <pre>
11638        * The below count is set when the associated cells are NOT
11639        * part of this protobuf message; they are passed alongside
11640        * and then this Message is a placeholder with metadata.  The
11641        * count is needed to know how many to peel off the block of Cells as
11642        * ours.  NOTE: This is different from the pb managed cell_count of the
11643        * 'cell' field above which is non-null when the cells are pb'd.
11644        * </pre>
11645        */
11646       public Builder clearAssociatedCellCount() {
11647         bitField0_ = (bitField0_ & ~0x00000080);
11648         associatedCellCount_ = 0;
11649         onChanged();
11650         return this;
11651       }
11652 
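            /*
             * Usage sketch (illustrative, not generated code): when the cells
             * for this mutation ride in the RPC cell block rather than inside
             * the protobuf itself, the sender only records how many of those
             * cells belong to this mutation:
             *
             *   mutation.setAssociatedCellCount(cellsForThisMutation.size());
             *
             *   // cellsForThisMutation is a placeholder for the caller's list
             *   // of cells shipped alongside the message.
             */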
11653       // optional uint64 nonce = 9;
11654       private long nonce_ ;
11655       /**
11656        * <code>optional uint64 nonce = 9;</code>
11657        */
11658       public boolean hasNonce() {
11659         return ((bitField0_ & 0x00000100) == 0x00000100);
11660       }
11661       /**
11662        * <code>optional uint64 nonce = 9;</code>
11663        */
11664       public long getNonce() {
11665         return nonce_;
11666       }
11667       /**
11668        * <code>optional uint64 nonce = 9;</code>
11669        */
11670       public Builder setNonce(long value) {
11671         bitField0_ |= 0x00000100;
11672         nonce_ = value;
11673         onChanged();
11674         return this;
11675       }
11676       /**
11677        * <code>optional uint64 nonce = 9;</code>
11678        */
11679       public Builder clearNonce() {
11680         bitField0_ = (bitField0_ & ~0x00000100);
11681         nonce_ = 0L;
11682         onChanged();
11683         return this;
11684       }
11685 
11686       // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto)
11687     }
11688 
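          /*
           * Usage sketch (illustrative, not generated code): putting the
           * Builder above together. MutationType.PUT is assumed from the
           * MutationType enum generated earlier in this file; the remaining
           * calls are the Builder methods shown above.
           *
           *   MutationProto proto = MutationProto.newBuilder()
           *       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
           *       .setMutateType(MutationProto.MutationType.PUT)
           *       .setDurability(MutationProto.Durability.USE_DEFAULT)
           *       .setTimestamp(System.currentTimeMillis())
           *       .build();
           */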
11689     static {
11690       defaultInstance = new MutationProto(true);
11691       defaultInstance.initFields();
11692     }
11693 
11694     // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto)
11695   }
11696 
11697   public interface MutateRequestOrBuilder
11698       extends com.google.protobuf.MessageOrBuilder {
11699 
11700     // required .hbase.pb.RegionSpecifier region = 1;
11701     /**
11702      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
11703      */
11704     boolean hasRegion();
11705     /**
11706      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
11707      */
11708     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
11709     /**
11710      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
11711      */
11712     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
11713 
11714     // required .hbase.pb.MutationProto mutation = 2;
11715     /**
11716      * <code>required .hbase.pb.MutationProto mutation = 2;</code>
11717      */
11718     boolean hasMutation();
11719     /**
11720      * <code>required .hbase.pb.MutationProto mutation = 2;</code>
11721      */
11722     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
11723     /**
11724      * <code>required .hbase.pb.MutationProto mutation = 2;</code>
11725      */
11726     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
11727 
11728     // optional .hbase.pb.Condition condition = 3;
11729     /**
11730      * <code>optional .hbase.pb.Condition condition = 3;</code>
11731      */
11732     boolean hasCondition();
11733     /**
11734      * <code>optional .hbase.pb.Condition condition = 3;</code>
11735      */
11736     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
11737     /**
11738      * <code>optional .hbase.pb.Condition condition = 3;</code>
11739      */
11740     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
11741 
11742     // optional uint64 nonce_group = 4;
11743     /**
11744      * <code>optional uint64 nonce_group = 4;</code>
11745      */
11746     boolean hasNonceGroup();
11747     /**
11748      * <code>optional uint64 nonce_group = 4;</code>
11749      */
11750     long getNonceGroup();
11751   }
11752   /**
11753    * Protobuf type {@code hbase.pb.MutateRequest}
11754    *
11755    * <pre>
11756    **
11757    * The mutate request. Perform a single Mutate operation.
11758    *
11759    * Optionally, you can specify a condition. The mutate
11760    * will take place only if the condition is met.  Otherwise,
11761    * the mutate will be ignored.  In the response result,
11762    * parameter processed is used to indicate if the mutate
11763    * actually happened.
11764    * </pre>
11765    */
11766   public static final class MutateRequest extends
11767       com.google.protobuf.GeneratedMessage
11768       implements MutateRequestOrBuilder {
11769     // Use MutateRequest.newBuilder() to construct.
11770     private MutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
11771       super(builder);
11772       this.unknownFields = builder.getUnknownFields();
11773     }
11774     private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11775 
11776     private static final MutateRequest defaultInstance;
11777     public static MutateRequest getDefaultInstance() {
11778       return defaultInstance;
11779     }
11780 
11781     public MutateRequest getDefaultInstanceForType() {
11782       return defaultInstance;
11783     }
11784 
11785     private final com.google.protobuf.UnknownFieldSet unknownFields;
11786     @java.lang.Override
11787     public final com.google.protobuf.UnknownFieldSet
11788         getUnknownFields() {
11789       return this.unknownFields;
11790     }
11791     private MutateRequest(
11792         com.google.protobuf.CodedInputStream input,
11793         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11794         throws com.google.protobuf.InvalidProtocolBufferException {
11795       initFields();
11796       int mutable_bitField0_ = 0;
11797       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
11798           com.google.protobuf.UnknownFieldSet.newBuilder();
11799       try {
11800         boolean done = false;
11801         while (!done) {
11802           int tag = input.readTag();
11803           switch (tag) {
11804             case 0:
11805               done = true;
11806               break;
11807             default: {
11808               if (!parseUnknownField(input, unknownFields,
11809                                      extensionRegistry, tag)) {
11810                 done = true;
11811               }
11812               break;
11813             }
11814             case 10: {
11815               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
11816               if (((bitField0_ & 0x00000001) == 0x00000001)) {
11817                 subBuilder = region_.toBuilder();
11818               }
11819               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
11820               if (subBuilder != null) {
11821                 subBuilder.mergeFrom(region_);
11822                 region_ = subBuilder.buildPartial();
11823               }
11824               bitField0_ |= 0x00000001;
11825               break;
11826             }
11827             case 18: {
11828               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
11829               if (((bitField0_ & 0x00000002) == 0x00000002)) {
11830                 subBuilder = mutation_.toBuilder();
11831               }
11832               mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
11833               if (subBuilder != null) {
11834                 subBuilder.mergeFrom(mutation_);
11835                 mutation_ = subBuilder.buildPartial();
11836               }
11837               bitField0_ |= 0x00000002;
11838               break;
11839             }
11840             case 26: {
11841               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
11842               if (((bitField0_ & 0x00000004) == 0x00000004)) {
11843                 subBuilder = condition_.toBuilder();
11844               }
11845               condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
11846               if (subBuilder != null) {
11847                 subBuilder.mergeFrom(condition_);
11848                 condition_ = subBuilder.buildPartial();
11849               }
11850               bitField0_ |= 0x00000004;
11851               break;
11852             }
11853             case 32: {
11854               bitField0_ |= 0x00000008;
11855               nonceGroup_ = input.readUInt64();
11856               break;
11857             }
11858           }
11859         }
11860       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11861         throw e.setUnfinishedMessage(this);
11862       } catch (java.io.IOException e) {
11863         throw new com.google.protobuf.InvalidProtocolBufferException(
11864             e.getMessage()).setUnfinishedMessage(this);
11865       } finally {
11866         this.unknownFields = unknownFields.build();
11867         makeExtensionsImmutable();
11868       }
11869     }
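          /*
           * Illustrative note (not part of the generated source): the switch
           * in the parsing constructor above dispatches on protobuf wire tags,
           * which encode (field_number << 3) | wire_type. With wire type 2
           * (length-delimited), field 1 (region) arrives as tag 10, field 2
           * (mutation) as 18 and field 3 (condition) as 26; field 4
           * (nonce_group) is a varint (wire type 0), hence tag 32.
           */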
11870     public static final com.google.protobuf.Descriptors.Descriptor
11871         getDescriptor() {
11872       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor;
11873     }
11874 
11875     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11876         internalGetFieldAccessorTable() {
11877       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable
11878           .ensureFieldAccessorsInitialized(
11879               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
11880     }
11881 
11882     public static com.google.protobuf.Parser<MutateRequest> PARSER =
11883         new com.google.protobuf.AbstractParser<MutateRequest>() {
11884       public MutateRequest parsePartialFrom(
11885           com.google.protobuf.CodedInputStream input,
11886           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11887           throws com.google.protobuf.InvalidProtocolBufferException {
11888         return new MutateRequest(input, extensionRegistry);
11889       }
11890     };
11891 
11892     @java.lang.Override
11893     public com.google.protobuf.Parser<MutateRequest> getParserForType() {
11894       return PARSER;
11895     }
11896 
11897     private int bitField0_;
11898     // required .hbase.pb.RegionSpecifier region = 1;
11899     public static final int REGION_FIELD_NUMBER = 1;
11900     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
11901     /**
11902      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
11903      */
11904     public boolean hasRegion() {
11905       return ((bitField0_ & 0x00000001) == 0x00000001);
11906     }
11907     /**
11908      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
11909      */
11910     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
11911       return region_;
11912     }
11913     /**
11914      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
11915      */
11916     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
11917       return region_;
11918     }
11919 
11920     // required .hbase.pb.MutationProto mutation = 2;
11921     public static final int MUTATION_FIELD_NUMBER = 2;
11922     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
11923     /**
11924      * <code>required .hbase.pb.MutationProto mutation = 2;</code>
11925      */
11926     public boolean hasMutation() {
11927       return ((bitField0_ & 0x00000002) == 0x00000002);
11928     }
11929     /**
11930      * <code>required .hbase.pb.MutationProto mutation = 2;</code>
11931      */
11932     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
11933       return mutation_;
11934     }
11935     /**
11936      * <code>required .hbase.pb.MutationProto mutation = 2;</code>
11937      */
11938     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
11939       return mutation_;
11940     }
11941 
11942     // optional .hbase.pb.Condition condition = 3;
11943     public static final int CONDITION_FIELD_NUMBER = 3;
11944     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
11945     /**
11946      * <code>optional .hbase.pb.Condition condition = 3;</code>
11947      */
11948     public boolean hasCondition() {
11949       return ((bitField0_ & 0x00000004) == 0x00000004);
11950     }
11951     /**
11952      * <code>optional .hbase.pb.Condition condition = 3;</code>
11953      */
11954     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
11955       return condition_;
11956     }
11957     /**
11958      * <code>optional .hbase.pb.Condition condition = 3;</code>
11959      */
11960     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
11961       return condition_;
11962     }
11963 
11964     // optional uint64 nonce_group = 4;
11965     public static final int NONCE_GROUP_FIELD_NUMBER = 4;
11966     private long nonceGroup_;
11967     /**
11968      * <code>optional uint64 nonce_group = 4;</code>
11969      */
11970     public boolean hasNonceGroup() {
11971       return ((bitField0_ & 0x00000008) == 0x00000008);
11972     }
11973     /**
11974      * <code>optional uint64 nonce_group = 4;</code>
11975      */
11976     public long getNonceGroup() {
11977       return nonceGroup_;
11978     }
11979 
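          /*
           * Usage sketch (illustrative, not generated code): region and
           * mutation are required (see isInitialized() below); condition and
           * nonce_group are optional. The setters are the standard generated
           * ones on the Builder defined later in this class.
           *
           *   MutateRequest request = MutateRequest.newBuilder()
           *       .setRegion(regionSpecifier)    // required
           *       .setMutation(mutationProto)    // required
           *       .setNonceGroup(nonceGroup)     // optional
           *       .build();
           *
           *   // regionSpecifier, mutationProto and nonceGroup are placeholders
           *   // for values built elsewhere.
           */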
11980     private void initFields() {
11981       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
11982       mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
11983       condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
11984       nonceGroup_ = 0L;
11985     }
11986     private byte memoizedIsInitialized = -1;
11987     public final boolean isInitialized() {
11988       byte isInitialized = memoizedIsInitialized;
11989       if (isInitialized != -1) return isInitialized == 1;
11990 
11991       if (!hasRegion()) {
11992         memoizedIsInitialized = 0;
11993         return false;
11994       }
11995       if (!hasMutation()) {
11996         memoizedIsInitialized = 0;
11997         return false;
11998       }
11999       if (!getRegion().isInitialized()) {
12000         memoizedIsInitialized = 0;
12001         return false;
12002       }
12003       if (!getMutation().isInitialized()) {
12004         memoizedIsInitialized = 0;
12005         return false;
12006       }
12007       if (hasCondition()) {
12008         if (!getCondition().isInitialized()) {
12009           memoizedIsInitialized = 0;
12010           return false;
12011         }
12012       }
12013       memoizedIsInitialized = 1;
12014       return true;
12015     }
12016 
12017     public void writeTo(com.google.protobuf.CodedOutputStream output)
12018                         throws java.io.IOException {
12019       getSerializedSize();
12020       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12021         output.writeMessage(1, region_);
12022       }
12023       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12024         output.writeMessage(2, mutation_);
12025       }
12026       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12027         output.writeMessage(3, condition_);
12028       }
12029       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12030         output.writeUInt64(4, nonceGroup_);
12031       }
12032       getUnknownFields().writeTo(output);
12033     }
12034 
12035     private int memoizedSerializedSize = -1;
12036     public int getSerializedSize() {
12037       int size = memoizedSerializedSize;
12038       if (size != -1) return size;
12039 
12040       size = 0;
12041       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12042         size += com.google.protobuf.CodedOutputStream
12043           .computeMessageSize(1, region_);
12044       }
12045       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12046         size += com.google.protobuf.CodedOutputStream
12047           .computeMessageSize(2, mutation_);
12048       }
12049       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12050         size += com.google.protobuf.CodedOutputStream
12051           .computeMessageSize(3, condition_);
12052       }
12053       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12054         size += com.google.protobuf.CodedOutputStream
12055           .computeUInt64Size(4, nonceGroup_);
12056       }
12057       size += getUnknownFields().getSerializedSize();
12058       memoizedSerializedSize = size;
12059       return size;
12060     }
12061 
12062     private static final long serialVersionUID = 0L;
12063     @java.lang.Override
12064     protected java.lang.Object writeReplace()
12065         throws java.io.ObjectStreamException {
12066       return super.writeReplace();
12067     }
12068 
12069     @java.lang.Override
12070     public boolean equals(final java.lang.Object obj) {
12071       if (obj == this) {
12072         return true;
12073       }
12074       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) {
12075         return super.equals(obj);
12076       }
12077       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj;
12078 
12079       boolean result = true;
12080       result = result && (hasRegion() == other.hasRegion());
12081       if (hasRegion()) {
12082         result = result && getRegion()
12083             .equals(other.getRegion());
12084       }
12085       result = result && (hasMutation() == other.hasMutation());
12086       if (hasMutation()) {
12087         result = result && getMutation()
12088             .equals(other.getMutation());
12089       }
12090       result = result && (hasCondition() == other.hasCondition());
12091       if (hasCondition()) {
12092         result = result && getCondition()
12093             .equals(other.getCondition());
12094       }
12095       result = result && (hasNonceGroup() == other.hasNonceGroup());
12096       if (hasNonceGroup()) {
12097         result = result && (getNonceGroup()
12098             == other.getNonceGroup());
12099       }
12100       result = result &&
12101           getUnknownFields().equals(other.getUnknownFields());
12102       return result;
12103     }
12104 
12105     private int memoizedHashCode = 0;
12106     @java.lang.Override
12107     public int hashCode() {
12108       if (memoizedHashCode != 0) {
12109         return memoizedHashCode;
12110       }
12111       int hash = 41;
12112       hash = (19 * hash) + getDescriptorForType().hashCode();
12113       if (hasRegion()) {
12114         hash = (37 * hash) + REGION_FIELD_NUMBER;
12115         hash = (53 * hash) + getRegion().hashCode();
12116       }
12117       if (hasMutation()) {
12118         hash = (37 * hash) + MUTATION_FIELD_NUMBER;
12119         hash = (53 * hash) + getMutation().hashCode();
12120       }
12121       if (hasCondition()) {
12122         hash = (37 * hash) + CONDITION_FIELD_NUMBER;
12123         hash = (53 * hash) + getCondition().hashCode();
12124       }
12125       if (hasNonceGroup()) {
12126         hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
12127         hash = (53 * hash) + hashLong(getNonceGroup());
12128       }
12129       hash = (29 * hash) + getUnknownFields().hashCode();
12130       memoizedHashCode = hash;
12131       return hash;
12132     }
12133 
12134     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12135         com.google.protobuf.ByteString data)
12136         throws com.google.protobuf.InvalidProtocolBufferException {
12137       return PARSER.parseFrom(data);
12138     }
12139     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12140         com.google.protobuf.ByteString data,
12141         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12142         throws com.google.protobuf.InvalidProtocolBufferException {
12143       return PARSER.parseFrom(data, extensionRegistry);
12144     }
12145     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data)
12146         throws com.google.protobuf.InvalidProtocolBufferException {
12147       return PARSER.parseFrom(data);
12148     }
12149     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12150         byte[] data,
12151         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12152         throws com.google.protobuf.InvalidProtocolBufferException {
12153       return PARSER.parseFrom(data, extensionRegistry);
12154     }
12155     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input)
12156         throws java.io.IOException {
12157       return PARSER.parseFrom(input);
12158     }
12159     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12160         java.io.InputStream input,
12161         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12162         throws java.io.IOException {
12163       return PARSER.parseFrom(input, extensionRegistry);
12164     }
12165     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input)
12166         throws java.io.IOException {
12167       return PARSER.parseDelimitedFrom(input);
12168     }
12169     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(
12170         java.io.InputStream input,
12171         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12172         throws java.io.IOException {
12173       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12174     }
12175     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12176         com.google.protobuf.CodedInputStream input)
12177         throws java.io.IOException {
12178       return PARSER.parseFrom(input);
12179     }
12180     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12181         com.google.protobuf.CodedInputStream input,
12182         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12183         throws java.io.IOException {
12184       return PARSER.parseFrom(input, extensionRegistry);
12185     }
12186 
12187     public static Builder newBuilder() { return Builder.create(); }
12188     public Builder newBuilderForType() { return newBuilder(); }
12189     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) {
12190       return newBuilder().mergeFrom(prototype);
12191     }
12192     public Builder toBuilder() { return newBuilder(this); }
12193 
12194     @java.lang.Override
12195     protected Builder newBuilderForType(
12196         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12197       Builder builder = new Builder(parent);
12198       return builder;
12199     }
12200     /**
12201      * Protobuf type {@code hbase.pb.MutateRequest}
12202      *
12203      * <pre>
12204      **
12205      * The mutate request. Perform a single Mutate operation.
12206      *
12207      * Optionally, you can specify a condition; the mutate will
12208      * take place only if the condition is met, and will be
12209      * ignored otherwise.  The processed field of the response
12210      * result is used to indicate whether the mutate actually
12211      * happened.
12212      * </pre>
12213      */
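    // Editor's sketch (illustrative, not part of the generated file): one
    // plausible way a caller could assemble a conditional MutateRequest with
    // these builders.  The regionName/nonceGroup/condition values below are
    // assumed placeholders; consult Client.proto for the exact Condition
    // sub-fields (row, family, qualifier, compare type, comparator).
    //
    //   MutateRequest request = MutateRequest.newBuilder()
    //       .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
    //           .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
    //           .setValue(com.google.protobuf.ByteString.copyFromUtf8(regionName)))
    //       .setMutation(MutationProto.newBuilder()
    //           .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
    //           .setMutateType(MutationProto.MutationType.PUT))
    //       .setCondition(condition)     // optional: apply only when the condition holds
    //       .setNonceGroup(nonceGroup)   // optional: supports idempotent retries
    //       .build();
    //
    // The reply's MutateResponse.getProcessed() then reports whether the
    // mutate was actually applied.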
12214     public static final class Builder extends
12215         com.google.protobuf.GeneratedMessage.Builder<Builder>
12216        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder {
12217       public static final com.google.protobuf.Descriptors.Descriptor
12218           getDescriptor() {
12219         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor;
12220       }
12221 
12222       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12223           internalGetFieldAccessorTable() {
12224         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable
12225             .ensureFieldAccessorsInitialized(
12226                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
12227       }
12228 
12229       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder()
12230       private Builder() {
12231         maybeForceBuilderInitialization();
12232       }
12233 
12234       private Builder(
12235           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12236         super(parent);
12237         maybeForceBuilderInitialization();
12238       }
12239       private void maybeForceBuilderInitialization() {
12240         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12241           getRegionFieldBuilder();
12242           getMutationFieldBuilder();
12243           getConditionFieldBuilder();
12244         }
12245       }
12246       private static Builder create() {
12247         return new Builder();
12248       }
12249 
12250       public Builder clear() {
12251         super.clear();
12252         if (regionBuilder_ == null) {
12253           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12254         } else {
12255           regionBuilder_.clear();
12256         }
12257         bitField0_ = (bitField0_ & ~0x00000001);
12258         if (mutationBuilder_ == null) {
12259           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12260         } else {
12261           mutationBuilder_.clear();
12262         }
12263         bitField0_ = (bitField0_ & ~0x00000002);
12264         if (conditionBuilder_ == null) {
12265           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12266         } else {
12267           conditionBuilder_.clear();
12268         }
12269         bitField0_ = (bitField0_ & ~0x00000004);
12270         nonceGroup_ = 0L;
12271         bitField0_ = (bitField0_ & ~0x00000008);
12272         return this;
12273       }
12274 
12275       public Builder clone() {
12276         return create().mergeFrom(buildPartial());
12277       }
12278 
12279       public com.google.protobuf.Descriptors.Descriptor
12280           getDescriptorForType() {
12281         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor;
12282       }
12283 
12284       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() {
12285         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
12286       }
12287 
12288       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() {
12289         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial();
12290         if (!result.isInitialized()) {
12291           throw newUninitializedMessageException(result);
12292         }
12293         return result;
12294       }
12295 
12296       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() {
12297         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this);
12298         int from_bitField0_ = bitField0_;
12299         int to_bitField0_ = 0;
12300         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
12301           to_bitField0_ |= 0x00000001;
12302         }
12303         if (regionBuilder_ == null) {
12304           result.region_ = region_;
12305         } else {
12306           result.region_ = regionBuilder_.build();
12307         }
12308         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
12309           to_bitField0_ |= 0x00000002;
12310         }
12311         if (mutationBuilder_ == null) {
12312           result.mutation_ = mutation_;
12313         } else {
12314           result.mutation_ = mutationBuilder_.build();
12315         }
12316         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
12317           to_bitField0_ |= 0x00000004;
12318         }
12319         if (conditionBuilder_ == null) {
12320           result.condition_ = condition_;
12321         } else {
12322           result.condition_ = conditionBuilder_.build();
12323         }
12324         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
12325           to_bitField0_ |= 0x00000008;
12326         }
12327         result.nonceGroup_ = nonceGroup_;
12328         result.bitField0_ = to_bitField0_;
12329         onBuilt();
12330         return result;
12331       }
12332 
12333       public Builder mergeFrom(com.google.protobuf.Message other) {
12334         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) {
12335           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other);
12336         } else {
12337           super.mergeFrom(other);
12338           return this;
12339         }
12340       }
12341 
12342       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) {
12343         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this;
12344         if (other.hasRegion()) {
12345           mergeRegion(other.getRegion());
12346         }
12347         if (other.hasMutation()) {
12348           mergeMutation(other.getMutation());
12349         }
12350         if (other.hasCondition()) {
12351           mergeCondition(other.getCondition());
12352         }
12353         if (other.hasNonceGroup()) {
12354           setNonceGroup(other.getNonceGroup());
12355         }
12356         this.mergeUnknownFields(other.getUnknownFields());
12357         return this;
12358       }
12359 
12360       public final boolean isInitialized() {
12361         if (!hasRegion()) {
12362           
12363           return false;
12364         }
12365         if (!hasMutation()) {
12366           
12367           return false;
12368         }
12369         if (!getRegion().isInitialized()) {
12370           
12371           return false;
12372         }
12373         if (!getMutation().isInitialized()) {
12374           
12375           return false;
12376         }
12377         if (hasCondition()) {
12378           if (!getCondition().isInitialized()) {
12379             
12380             return false;
12381           }
12382         }
12383         return true;
12384       }
12385 
12386       public Builder mergeFrom(
12387           com.google.protobuf.CodedInputStream input,
12388           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12389           throws java.io.IOException {
12390         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null;
12391         try {
12392           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12393         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12394           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage();
12395           throw e;
12396         } finally {
12397           if (parsedMessage != null) {
12398             mergeFrom(parsedMessage);
12399           }
12400         }
12401         return this;
12402       }
12403       private int bitField0_;
12404 
12405       // required .hbase.pb.RegionSpecifier region = 1;
12406       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12407       private com.google.protobuf.SingleFieldBuilder<
12408           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
12409       /**
12410        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12411        */
12412       public boolean hasRegion() {
12413         return ((bitField0_ & 0x00000001) == 0x00000001);
12414       }
12415       /**
12416        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12417        */
12418       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
12419         if (regionBuilder_ == null) {
12420           return region_;
12421         } else {
12422           return regionBuilder_.getMessage();
12423         }
12424       }
12425       /**
12426        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12427        */
12428       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
12429         if (regionBuilder_ == null) {
12430           if (value == null) {
12431             throw new NullPointerException();
12432           }
12433           region_ = value;
12434           onChanged();
12435         } else {
12436           regionBuilder_.setMessage(value);
12437         }
12438         bitField0_ |= 0x00000001;
12439         return this;
12440       }
12441       /**
12442        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12443        */
12444       public Builder setRegion(
12445           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
12446         if (regionBuilder_ == null) {
12447           region_ = builderForValue.build();
12448           onChanged();
12449         } else {
12450           regionBuilder_.setMessage(builderForValue.build());
12451         }
12452         bitField0_ |= 0x00000001;
12453         return this;
12454       }
12455       /**
12456        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12457        */
12458       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
12459         if (regionBuilder_ == null) {
12460           if (((bitField0_ & 0x00000001) == 0x00000001) &&
12461               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
12462             region_ =
12463               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
12464           } else {
12465             region_ = value;
12466           }
12467           onChanged();
12468         } else {
12469           regionBuilder_.mergeFrom(value);
12470         }
12471         bitField0_ |= 0x00000001;
12472         return this;
12473       }
12474       /**
12475        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12476        */
12477       public Builder clearRegion() {
12478         if (regionBuilder_ == null) {
12479           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12480           onChanged();
12481         } else {
12482           regionBuilder_.clear();
12483         }
12484         bitField0_ = (bitField0_ & ~0x00000001);
12485         return this;
12486       }
12487       /**
12488        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12489        */
12490       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
12491         bitField0_ |= 0x00000001;
12492         onChanged();
12493         return getRegionFieldBuilder().getBuilder();
12494       }
12495       /**
12496        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12497        */
12498       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
12499         if (regionBuilder_ != null) {
12500           return regionBuilder_.getMessageOrBuilder();
12501         } else {
12502           return region_;
12503         }
12504       }
12505       /**
12506        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
12507        */
12508       private com.google.protobuf.SingleFieldBuilder<
12509           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
12510           getRegionFieldBuilder() {
12511         if (regionBuilder_ == null) {
12512           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12513               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
12514                   region_,
12515                   getParentForChildren(),
12516                   isClean());
12517           region_ = null;
12518         }
12519         return regionBuilder_;
12520       }
12521 
12522       // required .hbase.pb.MutationProto mutation = 2;
12523       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12524       private com.google.protobuf.SingleFieldBuilder<
12525           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
12526       /**
12527        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12528        */
12529       public boolean hasMutation() {
12530         return ((bitField0_ & 0x00000002) == 0x00000002);
12531       }
12532       /**
12533        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12534        */
12535       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
12536         if (mutationBuilder_ == null) {
12537           return mutation_;
12538         } else {
12539           return mutationBuilder_.getMessage();
12540         }
12541       }
12542       /**
12543        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12544        */
12545       public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
12546         if (mutationBuilder_ == null) {
12547           if (value == null) {
12548             throw new NullPointerException();
12549           }
12550           mutation_ = value;
12551           onChanged();
12552         } else {
12553           mutationBuilder_.setMessage(value);
12554         }
12555         bitField0_ |= 0x00000002;
12556         return this;
12557       }
12558       /**
12559        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12560        */
12561       public Builder setMutation(
12562           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
12563         if (mutationBuilder_ == null) {
12564           mutation_ = builderForValue.build();
12565           onChanged();
12566         } else {
12567           mutationBuilder_.setMessage(builderForValue.build());
12568         }
12569         bitField0_ |= 0x00000002;
12570         return this;
12571       }
12572       /**
12573        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12574        */
12575       public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
12576         if (mutationBuilder_ == null) {
12577           if (((bitField0_ & 0x00000002) == 0x00000002) &&
12578               mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
12579             mutation_ =
12580               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
12581           } else {
12582             mutation_ = value;
12583           }
12584           onChanged();
12585         } else {
12586           mutationBuilder_.mergeFrom(value);
12587         }
12588         bitField0_ |= 0x00000002;
12589         return this;
12590       }
12591       /**
12592        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12593        */
12594       public Builder clearMutation() {
12595         if (mutationBuilder_ == null) {
12596           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12597           onChanged();
12598         } else {
12599           mutationBuilder_.clear();
12600         }
12601         bitField0_ = (bitField0_ & ~0x00000002);
12602         return this;
12603       }
12604       /**
12605        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12606        */
12607       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
12608         bitField0_ |= 0x00000002;
12609         onChanged();
12610         return getMutationFieldBuilder().getBuilder();
12611       }
12612       /**
12613        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12614        */
12615       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
12616         if (mutationBuilder_ != null) {
12617           return mutationBuilder_.getMessageOrBuilder();
12618         } else {
12619           return mutation_;
12620         }
12621       }
12622       /**
12623        * <code>required .hbase.pb.MutationProto mutation = 2;</code>
12624        */
12625       private com.google.protobuf.SingleFieldBuilder<
12626           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> 
12627           getMutationFieldBuilder() {
12628         if (mutationBuilder_ == null) {
12629           mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12630               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
12631                   mutation_,
12632                   getParentForChildren(),
12633                   isClean());
12634           mutation_ = null;
12635         }
12636         return mutationBuilder_;
12637       }
12638 
12639       // optional .hbase.pb.Condition condition = 3;
12640       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12641       private com.google.protobuf.SingleFieldBuilder<
12642           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
12643       /**
12644        * <code>optional .hbase.pb.Condition condition = 3;</code>
12645        */
12646       public boolean hasCondition() {
12647         return ((bitField0_ & 0x00000004) == 0x00000004);
12648       }
12649       /**
12650        * <code>optional .hbase.pb.Condition condition = 3;</code>
12651        */
12652       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
12653         if (conditionBuilder_ == null) {
12654           return condition_;
12655         } else {
12656           return conditionBuilder_.getMessage();
12657         }
12658       }
12659       /**
12660        * <code>optional .hbase.pb.Condition condition = 3;</code>
12661        */
12662       public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
12663         if (conditionBuilder_ == null) {
12664           if (value == null) {
12665             throw new NullPointerException();
12666           }
12667           condition_ = value;
12668           onChanged();
12669         } else {
12670           conditionBuilder_.setMessage(value);
12671         }
12672         bitField0_ |= 0x00000004;
12673         return this;
12674       }
12675       /**
12676        * <code>optional .hbase.pb.Condition condition = 3;</code>
12677        */
12678       public Builder setCondition(
12679           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
12680         if (conditionBuilder_ == null) {
12681           condition_ = builderForValue.build();
12682           onChanged();
12683         } else {
12684           conditionBuilder_.setMessage(builderForValue.build());
12685         }
12686         bitField0_ |= 0x00000004;
12687         return this;
12688       }
12689       /**
12690        * <code>optional .hbase.pb.Condition condition = 3;</code>
12691        */
12692       public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
12693         if (conditionBuilder_ == null) {
12694           if (((bitField0_ & 0x00000004) == 0x00000004) &&
12695               condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
12696             condition_ =
12697               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
12698           } else {
12699             condition_ = value;
12700           }
12701           onChanged();
12702         } else {
12703           conditionBuilder_.mergeFrom(value);
12704         }
12705         bitField0_ |= 0x00000004;
12706         return this;
12707       }
12708       /**
12709        * <code>optional .hbase.pb.Condition condition = 3;</code>
12710        */
12711       public Builder clearCondition() {
12712         if (conditionBuilder_ == null) {
12713           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12714           onChanged();
12715         } else {
12716           conditionBuilder_.clear();
12717         }
12718         bitField0_ = (bitField0_ & ~0x00000004);
12719         return this;
12720       }
12721       /**
12722        * <code>optional .hbase.pb.Condition condition = 3;</code>
12723        */
12724       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
12725         bitField0_ |= 0x00000004;
12726         onChanged();
12727         return getConditionFieldBuilder().getBuilder();
12728       }
12729       /**
12730        * <code>optional .hbase.pb.Condition condition = 3;</code>
12731        */
12732       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
12733         if (conditionBuilder_ != null) {
12734           return conditionBuilder_.getMessageOrBuilder();
12735         } else {
12736           return condition_;
12737         }
12738       }
12739       /**
12740        * <code>optional .hbase.pb.Condition condition = 3;</code>
12741        */
12742       private com.google.protobuf.SingleFieldBuilder<
12743           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> 
12744           getConditionFieldBuilder() {
12745         if (conditionBuilder_ == null) {
12746           conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12747               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
12748                   condition_,
12749                   getParentForChildren(),
12750                   isClean());
12751           condition_ = null;
12752         }
12753         return conditionBuilder_;
12754       }
12755 
12756       // optional uint64 nonce_group = 4;
12757       private long nonceGroup_;
12758       /**
12759        * <code>optional uint64 nonce_group = 4;</code>
12760        */
12761       public boolean hasNonceGroup() {
12762         return ((bitField0_ & 0x00000008) == 0x00000008);
12763       }
12764       /**
12765        * <code>optional uint64 nonce_group = 4;</code>
12766        */
12767       public long getNonceGroup() {
12768         return nonceGroup_;
12769       }
12770       /**
12771        * <code>optional uint64 nonce_group = 4;</code>
12772        */
12773       public Builder setNonceGroup(long value) {
12774         bitField0_ |= 0x00000008;
12775         nonceGroup_ = value;
12776         onChanged();
12777         return this;
12778       }
12779       /**
12780        * <code>optional uint64 nonce_group = 4;</code>
12781        */
12782       public Builder clearNonceGroup() {
12783         bitField0_ = (bitField0_ & ~0x00000008);
12784         nonceGroup_ = 0L;
12785         onChanged();
12786         return this;
12787       }
12788 
12789       // @@protoc_insertion_point(builder_scope:hbase.pb.MutateRequest)
12790     }
12791 
12792     static {
12793       defaultInstance = new MutateRequest(true);
12794       defaultInstance.initFields();
12795     }
12796 
12797     // @@protoc_insertion_point(class_scope:hbase.pb.MutateRequest)
12798   }
12799 
12800   public interface MutateResponseOrBuilder
12801       extends com.google.protobuf.MessageOrBuilder {
12802 
12803     // optional .hbase.pb.Result result = 1;
12804     /**
12805      * <code>optional .hbase.pb.Result result = 1;</code>
12806      */
12807     boolean hasResult();
12808     /**
12809      * <code>optional .hbase.pb.Result result = 1;</code>
12810      */
12811     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
12812     /**
12813      * <code>optional .hbase.pb.Result result = 1;</code>
12814      */
12815     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
12816 
12817     // optional bool processed = 2;
12818     /**
12819      * <code>optional bool processed = 2;</code>
12820      *
12821      * <pre>
12822      * used by mutate to indicate whether the mutation was processed
12823      * </pre>
12824      */
12825     boolean hasProcessed();
12826     /**
12827      * <code>optional bool processed = 2;</code>
12828      *
12829      * <pre>
12830      * used by mutate to indicate whether the mutation was processed
12831      * </pre>
12832      */
12833     boolean getProcessed();
12834   }
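  // Editor's note (illustrative, not part of the generated file): for a
  // conditional mutate, callers typically check the optional processed flag
  // on the reply before assuming the mutation took effect, e.g.
  //
  //   MutateResponse response = ...;  // obtained from the RPC stub (assumed)
  //   boolean applied = response.hasProcessed() && response.getProcessed();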
12835   /**
12836    * Protobuf type {@code hbase.pb.MutateResponse}
12837    */
12838   public static final class MutateResponse extends
12839       com.google.protobuf.GeneratedMessage
12840       implements MutateResponseOrBuilder {
12841     // Use MutateResponse.newBuilder() to construct.
12842     private MutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12843       super(builder);
12844       this.unknownFields = builder.getUnknownFields();
12845     }
12846     private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12847 
12848     private static final MutateResponse defaultInstance;
12849     public static MutateResponse getDefaultInstance() {
12850       return defaultInstance;
12851     }
12852 
12853     public MutateResponse getDefaultInstanceForType() {
12854       return defaultInstance;
12855     }
12856 
12857     private final com.google.protobuf.UnknownFieldSet unknownFields;
12858     @java.lang.Override
12859     public final com.google.protobuf.UnknownFieldSet
12860         getUnknownFields() {
12861       return this.unknownFields;
12862     }
12863     private MutateResponse(
12864         com.google.protobuf.CodedInputStream input,
12865         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12866         throws com.google.protobuf.InvalidProtocolBufferException {
12867       initFields();
12868       int mutable_bitField0_ = 0;
12869       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12870           com.google.protobuf.UnknownFieldSet.newBuilder();
12871       try {
12872         boolean done = false;
12873         while (!done) {
12874           int tag = input.readTag();
12875           switch (tag) {
12876             case 0:
12877               done = true;
12878               break;
12879             default: {
12880               if (!parseUnknownField(input, unknownFields,
12881                                      extensionRegistry, tag)) {
12882                 done = true;
12883               }
12884               break;
12885             }
12886             case 10: {
12887               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
12888               if (((bitField0_ & 0x00000001) == 0x00000001)) {
12889                 subBuilder = result_.toBuilder();
12890               }
12891               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
12892               if (subBuilder != null) {
12893                 subBuilder.mergeFrom(result_);
12894                 result_ = subBuilder.buildPartial();
12895               }
12896               bitField0_ |= 0x00000001;
12897               break;
12898             }
12899             case 16: {
12900               bitField0_ |= 0x00000002;
12901               processed_ = input.readBool();
12902               break;
12903             }
12904           }
12905         }
12906       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12907         throw e.setUnfinishedMessage(this);
12908       } catch (java.io.IOException e) {
12909         throw new com.google.protobuf.InvalidProtocolBufferException(
12910             e.getMessage()).setUnfinishedMessage(this);
12911       } finally {
12912         this.unknownFields = unknownFields.build();
12913         makeExtensionsImmutable();
12914       }
12915     }
12916     public static final com.google.protobuf.Descriptors.Descriptor
12917         getDescriptor() {
12918       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
12919     }
12920 
12921     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12922         internalGetFieldAccessorTable() {
12923       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable
12924           .ensureFieldAccessorsInitialized(
12925               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
12926     }
12927 
12928     public static com.google.protobuf.Parser<MutateResponse> PARSER =
12929         new com.google.protobuf.AbstractParser<MutateResponse>() {
12930       public MutateResponse parsePartialFrom(
12931           com.google.protobuf.CodedInputStream input,
12932           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12933           throws com.google.protobuf.InvalidProtocolBufferException {
12934         return new MutateResponse(input, extensionRegistry);
12935       }
12936     };
12937 
12938     @java.lang.Override
12939     public com.google.protobuf.Parser<MutateResponse> getParserForType() {
12940       return PARSER;
12941     }
12942 
12943     private int bitField0_;
12944     // optional .hbase.pb.Result result = 1;
12945     public static final int RESULT_FIELD_NUMBER = 1;
12946     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
12947     /**
12948      * <code>optional .hbase.pb.Result result = 1;</code>
12949      */
12950     public boolean hasResult() {
12951       return ((bitField0_ & 0x00000001) == 0x00000001);
12952     }
12953     /**
12954      * <code>optional .hbase.pb.Result result = 1;</code>
12955      */
12956     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
12957       return result_;
12958     }
12959     /**
12960      * <code>optional .hbase.pb.Result result = 1;</code>
12961      */
12962     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
12963       return result_;
12964     }
12965 
12966     // optional bool processed = 2;
12967     public static final int PROCESSED_FIELD_NUMBER = 2;
12968     private boolean processed_;
12969     /**
12970      * <code>optional bool processed = 2;</code>
12971      *
12972      * <pre>
12973      * used by mutate to indicate whether the mutation was processed
12974      * </pre>
12975      */
12976     public boolean hasProcessed() {
12977       return ((bitField0_ & 0x00000002) == 0x00000002);
12978     }
12979     /**
12980      * <code>optional bool processed = 2;</code>
12981      *
12982      * <pre>
12983      * used by mutate to indicate whether the mutation was processed
12984      * </pre>
12985      */
12986     public boolean getProcessed() {
12987       return processed_;
12988     }
12989 
12990     private void initFields() {
12991       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
12992       processed_ = false;
12993     }
12994     private byte memoizedIsInitialized = -1;
12995     public final boolean isInitialized() {
12996       byte isInitialized = memoizedIsInitialized;
12997       if (isInitialized != -1) return isInitialized == 1;
12998 
12999       memoizedIsInitialized = 1;
13000       return true;
13001     }
13002 
13003     public void writeTo(com.google.protobuf.CodedOutputStream output)
13004                         throws java.io.IOException {
13005       getSerializedSize();
13006       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13007         output.writeMessage(1, result_);
13008       }
13009       if (((bitField0_ & 0x00000002) == 0x00000002)) {
13010         output.writeBool(2, processed_);
13011       }
13012       getUnknownFields().writeTo(output);
13013     }
13014 
13015     private int memoizedSerializedSize = -1;
13016     public int getSerializedSize() {
13017       int size = memoizedSerializedSize;
13018       if (size != -1) return size;
13019 
13020       size = 0;
13021       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13022         size += com.google.protobuf.CodedOutputStream
13023           .computeMessageSize(1, result_);
13024       }
13025       if (((bitField0_ & 0x00000002) == 0x00000002)) {
13026         size += com.google.protobuf.CodedOutputStream
13027           .computeBoolSize(2, processed_);
13028       }
13029       size += getUnknownFields().getSerializedSize();
13030       memoizedSerializedSize = size;
13031       return size;
13032     }
13033 
13034     private static final long serialVersionUID = 0L;
13035     @java.lang.Override
13036     protected java.lang.Object writeReplace()
13037         throws java.io.ObjectStreamException {
13038       return super.writeReplace();
13039     }
13040 
13041     @java.lang.Override
13042     public boolean equals(final java.lang.Object obj) {
13043       if (obj == this) {
13044         return true;
13045       }
13046       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) {
13047         return super.equals(obj);
13048       }
13049       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj;
13050 
13051       boolean result = true;
13052       result = result && (hasResult() == other.hasResult());
13053       if (hasResult()) {
13054         result = result && getResult()
13055             .equals(other.getResult());
13056       }
13057       result = result && (hasProcessed() == other.hasProcessed());
13058       if (hasProcessed()) {
13059         result = result && (getProcessed()
13060             == other.getProcessed());
13061       }
13062       result = result &&
13063           getUnknownFields().equals(other.getUnknownFields());
13064       return result;
13065     }
13066 
13067     private int memoizedHashCode = 0;
13068     @java.lang.Override
13069     public int hashCode() {
13070       if (memoizedHashCode != 0) {
13071         return memoizedHashCode;
13072       }
13073       int hash = 41;
13074       hash = (19 * hash) + getDescriptorForType().hashCode();
13075       if (hasResult()) {
13076         hash = (37 * hash) + RESULT_FIELD_NUMBER;
13077         hash = (53 * hash) + getResult().hashCode();
13078       }
13079       if (hasProcessed()) {
13080         hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
13081         hash = (53 * hash) + hashBoolean(getProcessed());
13082       }
13083       hash = (29 * hash) + getUnknownFields().hashCode();
13084       memoizedHashCode = hash;
13085       return hash;
13086     }
13087 
13088     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13089         com.google.protobuf.ByteString data)
13090         throws com.google.protobuf.InvalidProtocolBufferException {
13091       return PARSER.parseFrom(data);
13092     }
13093     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13094         com.google.protobuf.ByteString data,
13095         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13096         throws com.google.protobuf.InvalidProtocolBufferException {
13097       return PARSER.parseFrom(data, extensionRegistry);
13098     }
13099     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data)
13100         throws com.google.protobuf.InvalidProtocolBufferException {
13101       return PARSER.parseFrom(data);
13102     }
13103     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13104         byte[] data,
13105         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13106         throws com.google.protobuf.InvalidProtocolBufferException {
13107       return PARSER.parseFrom(data, extensionRegistry);
13108     }
13109     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input)
13110         throws java.io.IOException {
13111       return PARSER.parseFrom(input);
13112     }
13113     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13114         java.io.InputStream input,
13115         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13116         throws java.io.IOException {
13117       return PARSER.parseFrom(input, extensionRegistry);
13118     }
13119     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input)
13120         throws java.io.IOException {
13121       return PARSER.parseDelimitedFrom(input);
13122     }
13123     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(
13124         java.io.InputStream input,
13125         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13126         throws java.io.IOException {
13127       return PARSER.parseDelimitedFrom(input, extensionRegistry);
13128     }
13129     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13130         com.google.protobuf.CodedInputStream input)
13131         throws java.io.IOException {
13132       return PARSER.parseFrom(input);
13133     }
13134     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13135         com.google.protobuf.CodedInputStream input,
13136         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13137         throws java.io.IOException {
13138       return PARSER.parseFrom(input, extensionRegistry);
13139     }
13140 
13141     public static Builder newBuilder() { return Builder.create(); }
13142     public Builder newBuilderForType() { return newBuilder(); }
13143     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) {
13144       return newBuilder().mergeFrom(prototype);
13145     }
13146     public Builder toBuilder() { return newBuilder(this); }
13147 
13148     @java.lang.Override
13149     protected Builder newBuilderForType(
13150         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13151       Builder builder = new Builder(parent);
13152       return builder;
13153     }
13154     /**
13155      * Protobuf type {@code hbase.pb.MutateResponse}
13156      */
13157     public static final class Builder extends
13158         com.google.protobuf.GeneratedMessage.Builder<Builder>
13159        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder {
13160       public static final com.google.protobuf.Descriptors.Descriptor
13161           getDescriptor() {
13162         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
13163       }
13164 
13165       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13166           internalGetFieldAccessorTable() {
13167         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable
13168             .ensureFieldAccessorsInitialized(
13169                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
13170       }
13171 
13172       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder()
13173       private Builder() {
13174         maybeForceBuilderInitialization();
13175       }
13176 
13177       private Builder(
13178           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13179         super(parent);
13180         maybeForceBuilderInitialization();
13181       }
13182       private void maybeForceBuilderInitialization() {
13183         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13184           getResultFieldBuilder();
13185         }
13186       }
13187       private static Builder create() {
13188         return new Builder();
13189       }
13190 
13191       public Builder clear() {
13192         super.clear();
13193         if (resultBuilder_ == null) {
13194           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13195         } else {
13196           resultBuilder_.clear();
13197         }
13198         bitField0_ = (bitField0_ & ~0x00000001);
13199         processed_ = false;
13200         bitField0_ = (bitField0_ & ~0x00000002);
13201         return this;
13202       }
13203 
13204       public Builder clone() {
13205         return create().mergeFrom(buildPartial());
13206       }
13207 
13208       public com.google.protobuf.Descriptors.Descriptor
13209           getDescriptorForType() {
13210         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
13211       }
13212 
13213       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() {
13214         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
13215       }
13216 
13217       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() {
13218         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial();
13219         if (!result.isInitialized()) {
13220           throw newUninitializedMessageException(result);
13221         }
13222         return result;
13223       }
13224 
13225       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() {
13226         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this);
13227         int from_bitField0_ = bitField0_;
13228         int to_bitField0_ = 0;
13229         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13230           to_bitField0_ |= 0x00000001;
13231         }
13232         if (resultBuilder_ == null) {
13233           result.result_ = result_;
13234         } else {
13235           result.result_ = resultBuilder_.build();
13236         }
13237         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
13238           to_bitField0_ |= 0x00000002;
13239         }
13240         result.processed_ = processed_;
13241         result.bitField0_ = to_bitField0_;
13242         onBuilt();
13243         return result;
13244       }
13245 
13246       public Builder mergeFrom(com.google.protobuf.Message other) {
13247         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) {
13248           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other);
13249         } else {
13250           super.mergeFrom(other);
13251           return this;
13252         }
13253       }
13254 
13255       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) {
13256         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this;
13257         if (other.hasResult()) {
13258           mergeResult(other.getResult());
13259         }
13260         if (other.hasProcessed()) {
13261           setProcessed(other.getProcessed());
13262         }
13263         this.mergeUnknownFields(other.getUnknownFields());
13264         return this;
13265       }
13266 
13267       public final boolean isInitialized() {
13268         return true;
13269       }
13270 
13271       public Builder mergeFrom(
13272           com.google.protobuf.CodedInputStream input,
13273           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13274           throws java.io.IOException {
13275         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null;
13276         try {
13277           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13278         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13279           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage();
13280           throw e;
13281         } finally {
13282           if (parsedMessage != null) {
13283             mergeFrom(parsedMessage);
13284           }
13285         }
13286         return this;
13287       }
13288       private int bitField0_;
13289 
13290       // optional .hbase.pb.Result result = 1;
13291       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13292       private com.google.protobuf.SingleFieldBuilder<
13293           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
13294       /**
13295        * <code>optional .hbase.pb.Result result = 1;</code>
13296        */
13297       public boolean hasResult() {
13298         return ((bitField0_ & 0x00000001) == 0x00000001);
13299       }
13300       /**
13301        * <code>optional .hbase.pb.Result result = 1;</code>
13302        */
13303       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
13304         if (resultBuilder_ == null) {
13305           return result_;
13306         } else {
13307           return resultBuilder_.getMessage();
13308         }
13309       }
13310       /**
13311        * <code>optional .hbase.pb.Result result = 1;</code>
13312        */
13313       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
13314         if (resultBuilder_ == null) {
13315           if (value == null) {
13316             throw new NullPointerException();
13317           }
13318           result_ = value;
13319           onChanged();
13320         } else {
13321           resultBuilder_.setMessage(value);
13322         }
13323         bitField0_ |= 0x00000001;
13324         return this;
13325       }
13326       /**
13327        * <code>optional .hbase.pb.Result result = 1;</code>
13328        */
13329       public Builder setResult(
13330           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
13331         if (resultBuilder_ == null) {
13332           result_ = builderForValue.build();
13333           onChanged();
13334         } else {
13335           resultBuilder_.setMessage(builderForValue.build());
13336         }
13337         bitField0_ |= 0x00000001;
13338         return this;
13339       }
13340       /**
13341        * <code>optional .hbase.pb.Result result = 1;</code>
13342        */
13343       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
13344         if (resultBuilder_ == null) {
13345           if (((bitField0_ & 0x00000001) == 0x00000001) &&
13346               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
13347             result_ =
13348               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
13349           } else {
13350             result_ = value;
13351           }
13352           onChanged();
13353         } else {
13354           resultBuilder_.mergeFrom(value);
13355         }
13356         bitField0_ |= 0x00000001;
13357         return this;
13358       }
13359       /**
13360        * <code>optional .hbase.pb.Result result = 1;</code>
13361        */
13362       public Builder clearResult() {
13363         if (resultBuilder_ == null) {
13364           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13365           onChanged();
13366         } else {
13367           resultBuilder_.clear();
13368         }
13369         bitField0_ = (bitField0_ & ~0x00000001);
13370         return this;
13371       }
13372       /**
13373        * <code>optional .hbase.pb.Result result = 1;</code>
13374        */
13375       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
13376         bitField0_ |= 0x00000001;
13377         onChanged();
13378         return getResultFieldBuilder().getBuilder();
13379       }
13380       /**
13381        * <code>optional .hbase.pb.Result result = 1;</code>
13382        */
13383       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
13384         if (resultBuilder_ != null) {
13385           return resultBuilder_.getMessageOrBuilder();
13386         } else {
13387           return result_;
13388         }
13389       }
13390       /**
13391        * <code>optional .hbase.pb.Result result = 1;</code>
13392        */
13393       private com.google.protobuf.SingleFieldBuilder<
13394           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
13395           getResultFieldBuilder() {
13396         if (resultBuilder_ == null) {
13397           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13398               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
13399                   result_,
13400                   getParentForChildren(),
13401                   isClean());
13402           result_ = null;
13403         }
13404         return resultBuilder_;
13405       }
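      // Lazy single-field builder pattern: result_ holds the plain message until
      // getResultBuilder() is first called; at that point a SingleFieldBuilder is
      // created, result_ is nulled out, and all later reads and writes go through
      // resultBuilder_, which is why every accessor above branches on
      // resultBuilder_ == null.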
13406 
13407       // optional bool processed = 2;
13408       private boolean processed_ ;
13409       /**
13410        * <code>optional bool processed = 2;</code>
13411        *
13412        * <pre>
13413        * set in a mutate response to indicate whether the mutation was processed
13414        * </pre>
13415        */
13416       public boolean hasProcessed() {
13417         return ((bitField0_ & 0x00000002) == 0x00000002);
13418       }
13419       /**
13420        * <code>optional bool processed = 2;</code>
13421        *
13422        * <pre>
13423        * set in a mutate response to indicate whether the mutation was processed
13424        * </pre>
13425        */
13426       public boolean getProcessed() {
13427         return processed_;
13428       }
13429       /**
13430        * <code>optional bool processed = 2;</code>
13431        *
13432        * <pre>
13433        * set in a mutate response to indicate whether the mutation was processed
13434        * </pre>
13435        */
13436       public Builder setProcessed(boolean value) {
13437         bitField0_ |= 0x00000002;
13438         processed_ = value;
13439         onChanged();
13440         return this;
13441       }
13442       /**
13443        * <code>optional bool processed = 2;</code>
13444        *
13445        * <pre>
13446        * set in a mutate response to indicate whether the mutation was processed
13447        * </pre>
13448        */
13449       public Builder clearProcessed() {
13450         bitField0_ = (bitField0_ & ~0x00000002);
13451         processed_ = false;
13452         onChanged();
13453         return this;
13454       }
13455 
13456       // @@protoc_insertion_point(builder_scope:hbase.pb.MutateResponse)
13457     }
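    /*
     * A minimal usage sketch of the builder above (illustrative only; the Result
     * value is just a default instance standing in for a real server-side result):
     *
     *   ClientProtos.MutateResponse response = ClientProtos.MutateResponse.newBuilder()
     *       .setResult(ClientProtos.Result.getDefaultInstance())
     *       .setProcessed(true)
     *       .build();
     *
     * Since both fields are optional, build() and buildPartial() behave the same
     * for this message; build() would only throw for uninitialized required fields.
     */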
13458 
13459     static {
13460       defaultInstance = new MutateResponse(true);
13461       defaultInstance.initFields();
13462     }
13463 
13464     // @@protoc_insertion_point(class_scope:hbase.pb.MutateResponse)
13465   }
13466 
13467   public interface ScanOrBuilder
13468       extends com.google.protobuf.MessageOrBuilder {
13469 
13470     // repeated .hbase.pb.Column column = 1;
13471     /**
13472      * <code>repeated .hbase.pb.Column column = 1;</code>
13473      */
13474     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> 
13475         getColumnList();
13476     /**
13477      * <code>repeated .hbase.pb.Column column = 1;</code>
13478      */
13479     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
13480     /**
13481      * <code>repeated .hbase.pb.Column column = 1;</code>
13482      */
13483     int getColumnCount();
13484     /**
13485      * <code>repeated .hbase.pb.Column column = 1;</code>
13486      */
13487     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
13488         getColumnOrBuilderList();
13489     /**
13490      * <code>repeated .hbase.pb.Column column = 1;</code>
13491      */
13492     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
13493         int index);
13494 
13495     // repeated .hbase.pb.NameBytesPair attribute = 2;
13496     /**
13497      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13498      */
13499     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> 
13500         getAttributeList();
13501     /**
13502      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13503      */
13504     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
13505     /**
13506      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13507      */
13508     int getAttributeCount();
13509     /**
13510      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13511      */
13512     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
13513         getAttributeOrBuilderList();
13514     /**
13515      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13516      */
13517     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
13518         int index);
13519 
13520     // optional bytes start_row = 3;
13521     /**
13522      * <code>optional bytes start_row = 3;</code>
13523      */
13524     boolean hasStartRow();
13525     /**
13526      * <code>optional bytes start_row = 3;</code>
13527      */
13528     com.google.protobuf.ByteString getStartRow();
13529 
13530     // optional bytes stop_row = 4;
13531     /**
13532      * <code>optional bytes stop_row = 4;</code>
13533      */
13534     boolean hasStopRow();
13535     /**
13536      * <code>optional bytes stop_row = 4;</code>
13537      */
13538     com.google.protobuf.ByteString getStopRow();
13539 
13540     // optional .hbase.pb.Filter filter = 5;
13541     /**
13542      * <code>optional .hbase.pb.Filter filter = 5;</code>
13543      */
13544     boolean hasFilter();
13545     /**
13546      * <code>optional .hbase.pb.Filter filter = 5;</code>
13547      */
13548     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
13549     /**
13550      * <code>optional .hbase.pb.Filter filter = 5;</code>
13551      */
13552     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
13553 
13554     // optional .hbase.pb.TimeRange time_range = 6;
13555     /**
13556      * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
13557      */
13558     boolean hasTimeRange();
13559     /**
13560      * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
13561      */
13562     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
13563     /**
13564      * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
13565      */
13566     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
13567 
13568     // optional uint32 max_versions = 7 [default = 1];
13569     /**
13570      * <code>optional uint32 max_versions = 7 [default = 1];</code>
13571      */
13572     boolean hasMaxVersions();
13573     /**
13574      * <code>optional uint32 max_versions = 7 [default = 1];</code>
13575      */
13576     int getMaxVersions();
13577 
13578     // optional bool cache_blocks = 8 [default = true];
13579     /**
13580      * <code>optional bool cache_blocks = 8 [default = true];</code>
13581      */
13582     boolean hasCacheBlocks();
13583     /**
13584      * <code>optional bool cache_blocks = 8 [default = true];</code>
13585      */
13586     boolean getCacheBlocks();
13587 
13588     // optional uint32 batch_size = 9;
13589     /**
13590      * <code>optional uint32 batch_size = 9;</code>
13591      */
13592     boolean hasBatchSize();
13593     /**
13594      * <code>optional uint32 batch_size = 9;</code>
13595      */
13596     int getBatchSize();
13597 
13598     // optional uint64 max_result_size = 10;
13599     /**
13600      * <code>optional uint64 max_result_size = 10;</code>
13601      */
13602     boolean hasMaxResultSize();
13603     /**
13604      * <code>optional uint64 max_result_size = 10;</code>
13605      */
13606     long getMaxResultSize();
13607 
13608     // optional uint32 store_limit = 11;
13609     /**
13610      * <code>optional uint32 store_limit = 11;</code>
13611      */
13612     boolean hasStoreLimit();
13613     /**
13614      * <code>optional uint32 store_limit = 11;</code>
13615      */
13616     int getStoreLimit();
13617 
13618     // optional uint32 store_offset = 12;
13619     /**
13620      * <code>optional uint32 store_offset = 12;</code>
13621      */
13622     boolean hasStoreOffset();
13623     /**
13624      * <code>optional uint32 store_offset = 12;</code>
13625      */
13626     int getStoreOffset();
13627 
13628     // optional bool load_column_families_on_demand = 13;
13629     /**
13630      * <code>optional bool load_column_families_on_demand = 13;</code>
13631      *
13632      * <pre>
13633      * DO NOT add defaults to load_column_families_on_demand. 
13634      * </pre>
13635      */
13636     boolean hasLoadColumnFamiliesOnDemand();
13637     /**
13638      * <code>optional bool load_column_families_on_demand = 13;</code>
13639      *
13640      * <pre>
13641      * DO NOT add defaults to load_column_families_on_demand. 
13642      * </pre>
13643      */
13644     boolean getLoadColumnFamiliesOnDemand();
13645 
13646     // optional bool small = 14;
13647     /**
13648      * <code>optional bool small = 14;</code>
13649      */
13650     boolean hasSmall();
13651     /**
13652      * <code>optional bool small = 14;</code>
13653      */
13654     boolean getSmall();
13655 
13656     // optional bool reversed = 15 [default = false];
13657     /**
13658      * <code>optional bool reversed = 15 [default = false];</code>
13659      */
13660     boolean hasReversed();
13661     /**
13662      * <code>optional bool reversed = 15 [default = false];</code>
13663      */
13664     boolean getReversed();
13665 
13666     // optional .hbase.pb.Consistency consistency = 16 [default = STRONG];
13667     /**
13668      * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
13669      */
13670     boolean hasConsistency();
13671     /**
13672      * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
13673      */
13674     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
13675 
13676     // optional uint32 caching = 17;
13677     /**
13678      * <code>optional uint32 caching = 17;</code>
13679      */
13680     boolean hasCaching();
13681     /**
13682      * <code>optional uint32 caching = 17;</code>
13683      */
13684     int getCaching();
13685 
13686     // optional bool allow_partial_results = 18;
13687     /**
13688      * <code>optional bool allow_partial_results = 18;</code>
13689      */
13690     boolean hasAllowPartialResults();
13691     /**
13692      * <code>optional bool allow_partial_results = 18;</code>
13693      */
13694     boolean getAllowPartialResults();
13695   }
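  /*
   * A minimal sketch of populating the Scan message whose accessors are declared
   * above (field values are arbitrary placeholders; "cf", "row-000" and "row-999"
   * are just example family and row keys):
   *
   *   ClientProtos.Scan scan = ClientProtos.Scan.newBuilder()
   *       .addColumn(ClientProtos.Column.newBuilder()
   *           .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf")))
   *       .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-000"))
   *       .setStopRow(com.google.protobuf.ByteString.copyFromUtf8("row-999"))
   *       .setMaxVersions(1)
   *       .setCaching(100)
   *       .setConsistency(ClientProtos.Consistency.TIMELINE)
   *       .build();
   *
   * In HBase itself this protobuf is normally produced from a client-side Scan by
   * helper code (e.g. ProtobufUtil.toScan) rather than assembled by hand.
   */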
13696   /**
13697    * Protobuf type {@code hbase.pb.Scan}
13698    *
13699    * <pre>
13700    **
13701    * Instead of getting rows from a table one at a time, you can scan a table
13702    * with optional filters. You can specify the row key range, the time range,
13703    * the columns/families to scan, and so on.
13704    *
13705    * This Scan message is sent only in the first request of a scan. The response
13706    * to that initial request returns a scanner id, which should then be used to
13707    * fetch subsequent result batches until the scanner is closed.
13708    * </pre>
13709    */
13710   public static final class Scan extends
13711       com.google.protobuf.GeneratedMessage
13712       implements ScanOrBuilder {
13713     // Use Scan.newBuilder() to construct.
13714     private Scan(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13715       super(builder);
13716       this.unknownFields = builder.getUnknownFields();
13717     }
13718     private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13719 
13720     private static final Scan defaultInstance;
13721     public static Scan getDefaultInstance() {
13722       return defaultInstance;
13723     }
13724 
13725     public Scan getDefaultInstanceForType() {
13726       return defaultInstance;
13727     }
13728 
13729     private final com.google.protobuf.UnknownFieldSet unknownFields;
13730     @java.lang.Override
13731     public final com.google.protobuf.UnknownFieldSet
13732         getUnknownFields() {
13733       return this.unknownFields;
13734     }
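    // Wire-format note for the parsing constructor below: each "case" value is a
    // protobuf tag computed as (field_number << 3) | wire_type. For example,
    // case 10 is field 1 (column, length-delimited), case 26 is field 3
    // (start_row, length-delimited), case 56 is field 7 (max_versions, varint)
    // and case 128 is field 16 (consistency, varint).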
13735     private Scan(
13736         com.google.protobuf.CodedInputStream input,
13737         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13738         throws com.google.protobuf.InvalidProtocolBufferException {
13739       initFields();
13740       int mutable_bitField0_ = 0;
13741       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13742           com.google.protobuf.UnknownFieldSet.newBuilder();
13743       try {
13744         boolean done = false;
13745         while (!done) {
13746           int tag = input.readTag();
13747           switch (tag) {
13748             case 0:
13749               done = true;
13750               break;
13751             default: {
13752               if (!parseUnknownField(input, unknownFields,
13753                                      extensionRegistry, tag)) {
13754                 done = true;
13755               }
13756               break;
13757             }
13758             case 10: {
13759               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
13760                 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
13761                 mutable_bitField0_ |= 0x00000001;
13762               }
13763               column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
13764               break;
13765             }
13766             case 18: {
13767               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
13768                 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
13769                 mutable_bitField0_ |= 0x00000002;
13770               }
13771               attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
13772               break;
13773             }
13774             case 26: {
13775               bitField0_ |= 0x00000001;
13776               startRow_ = input.readBytes();
13777               break;
13778             }
13779             case 34: {
13780               bitField0_ |= 0x00000002;
13781               stopRow_ = input.readBytes();
13782               break;
13783             }
13784             case 42: {
13785               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
13786               if (((bitField0_ & 0x00000004) == 0x00000004)) {
13787                 subBuilder = filter_.toBuilder();
13788               }
13789               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
13790               if (subBuilder != null) {
13791                 subBuilder.mergeFrom(filter_);
13792                 filter_ = subBuilder.buildPartial();
13793               }
13794               bitField0_ |= 0x00000004;
13795               break;
13796             }
13797             case 50: {
13798               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
13799               if (((bitField0_ & 0x00000008) == 0x00000008)) {
13800                 subBuilder = timeRange_.toBuilder();
13801               }
13802               timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
13803               if (subBuilder != null) {
13804                 subBuilder.mergeFrom(timeRange_);
13805                 timeRange_ = subBuilder.buildPartial();
13806               }
13807               bitField0_ |= 0x00000008;
13808               break;
13809             }
13810             case 56: {
13811               bitField0_ |= 0x00000010;
13812               maxVersions_ = input.readUInt32();
13813               break;
13814             }
13815             case 64: {
13816               bitField0_ |= 0x00000020;
13817               cacheBlocks_ = input.readBool();
13818               break;
13819             }
13820             case 72: {
13821               bitField0_ |= 0x00000040;
13822               batchSize_ = input.readUInt32();
13823               break;
13824             }
13825             case 80: {
13826               bitField0_ |= 0x00000080;
13827               maxResultSize_ = input.readUInt64();
13828               break;
13829             }
13830             case 88: {
13831               bitField0_ |= 0x00000100;
13832               storeLimit_ = input.readUInt32();
13833               break;
13834             }
13835             case 96: {
13836               bitField0_ |= 0x00000200;
13837               storeOffset_ = input.readUInt32();
13838               break;
13839             }
13840             case 104: {
13841               bitField0_ |= 0x00000400;
13842               loadColumnFamiliesOnDemand_ = input.readBool();
13843               break;
13844             }
13845             case 112: {
13846               bitField0_ |= 0x00000800;
13847               small_ = input.readBool();
13848               break;
13849             }
13850             case 120: {
13851               bitField0_ |= 0x00001000;
13852               reversed_ = input.readBool();
13853               break;
13854             }
13855             case 128: {
13856               int rawValue = input.readEnum();
13857               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
13858               if (value == null) {
13859                 unknownFields.mergeVarintField(16, rawValue);
13860               } else {
13861                 bitField0_ |= 0x00002000;
13862                 consistency_ = value;
13863               }
13864               break;
13865             }
13866             case 136: {
13867               bitField0_ |= 0x00004000;
13868               caching_ = input.readUInt32();
13869               break;
13870             }
13871             case 144: {
13872               bitField0_ |= 0x00008000;
13873               allowPartialResults_ = input.readBool();
13874               break;
13875             }
13876           }
13877         }
13878       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13879         throw e.setUnfinishedMessage(this);
13880       } catch (java.io.IOException e) {
13881         throw new com.google.protobuf.InvalidProtocolBufferException(
13882             e.getMessage()).setUnfinishedMessage(this);
13883       } finally {
13884         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
13885           column_ = java.util.Collections.unmodifiableList(column_);
13886         }
13887         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
13888           attribute_ = java.util.Collections.unmodifiableList(attribute_);
13889         }
13890         this.unknownFields = unknownFields.build();
13891         makeExtensionsImmutable();
13892       }
13893     }
13894     public static final com.google.protobuf.Descriptors.Descriptor
13895         getDescriptor() {
13896       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor;
13897     }
13898 
13899     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13900         internalGetFieldAccessorTable() {
13901       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable
13902           .ensureFieldAccessorsInitialized(
13903               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
13904     }
13905 
13906     public static com.google.protobuf.Parser<Scan> PARSER =
13907         new com.google.protobuf.AbstractParser<Scan>() {
13908       public Scan parsePartialFrom(
13909           com.google.protobuf.CodedInputStream input,
13910           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13911           throws com.google.protobuf.InvalidProtocolBufferException {
13912         return new Scan(input, extensionRegistry);
13913       }
13914     };
13915 
13916     @java.lang.Override
13917     public com.google.protobuf.Parser<Scan> getParserForType() {
13918       return PARSER;
13919     }
13920 
13921     private int bitField0_;
13922     // repeated .hbase.pb.Column column = 1;
13923     public static final int COLUMN_FIELD_NUMBER = 1;
13924     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
13925     /**
13926      * <code>repeated .hbase.pb.Column column = 1;</code>
13927      */
13928     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
13929       return column_;
13930     }
13931     /**
13932      * <code>repeated .hbase.pb.Column column = 1;</code>
13933      */
13934     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
13935         getColumnOrBuilderList() {
13936       return column_;
13937     }
13938     /**
13939      * <code>repeated .hbase.pb.Column column = 1;</code>
13940      */
13941     public int getColumnCount() {
13942       return column_.size();
13943     }
13944     /**
13945      * <code>repeated .hbase.pb.Column column = 1;</code>
13946      */
13947     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
13948       return column_.get(index);
13949     }
13950     /**
13951      * <code>repeated .hbase.pb.Column column = 1;</code>
13952      */
13953     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
13954         int index) {
13955       return column_.get(index);
13956     }
13957 
13958     // repeated .hbase.pb.NameBytesPair attribute = 2;
13959     public static final int ATTRIBUTE_FIELD_NUMBER = 2;
13960     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
13961     /**
13962      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13963      */
13964     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
13965       return attribute_;
13966     }
13967     /**
13968      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13969      */
13970     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
13971         getAttributeOrBuilderList() {
13972       return attribute_;
13973     }
13974     /**
13975      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13976      */
13977     public int getAttributeCount() {
13978       return attribute_.size();
13979     }
13980     /**
13981      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13982      */
13983     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
13984       return attribute_.get(index);
13985     }
13986     /**
13987      * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
13988      */
13989     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
13990         int index) {
13991       return attribute_.get(index);
13992     }
13993 
13994     // optional bytes start_row = 3;
13995     public static final int START_ROW_FIELD_NUMBER = 3;
13996     private com.google.protobuf.ByteString startRow_;
13997     /**
13998      * <code>optional bytes start_row = 3;</code>
13999      */
14000     public boolean hasStartRow() {
14001       return ((bitField0_ & 0x00000001) == 0x00000001);
14002     }
14003     /**
14004      * <code>optional bytes start_row = 3;</code>
14005      */
14006     public com.google.protobuf.ByteString getStartRow() {
14007       return startRow_;
14008     }
14009 
14010     // optional bytes stop_row = 4;
14011     public static final int STOP_ROW_FIELD_NUMBER = 4;
14012     private com.google.protobuf.ByteString stopRow_;
14013     /**
14014      * <code>optional bytes stop_row = 4;</code>
14015      */
14016     public boolean hasStopRow() {
14017       return ((bitField0_ & 0x00000002) == 0x00000002);
14018     }
14019     /**
14020      * <code>optional bytes stop_row = 4;</code>
14021      */
14022     public com.google.protobuf.ByteString getStopRow() {
14023       return stopRow_;
14024     }
14025 
14026     // optional .hbase.pb.Filter filter = 5;
14027     public static final int FILTER_FIELD_NUMBER = 5;
14028     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
14029     /**
14030      * <code>optional .hbase.pb.Filter filter = 5;</code>
14031      */
14032     public boolean hasFilter() {
14033       return ((bitField0_ & 0x00000004) == 0x00000004);
14034     }
14035     /**
14036      * <code>optional .hbase.pb.Filter filter = 5;</code>
14037      */
14038     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
14039       return filter_;
14040     }
14041     /**
14042      * <code>optional .hbase.pb.Filter filter = 5;</code>
14043      */
14044     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
14045       return filter_;
14046     }
14047 
14048     // optional .hbase.pb.TimeRange time_range = 6;
14049     public static final int TIME_RANGE_FIELD_NUMBER = 6;
14050     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
14051     /**
14052      * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
14053      */
14054     public boolean hasTimeRange() {
14055       return ((bitField0_ & 0x00000008) == 0x00000008);
14056     }
14057     /**
14058      * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
14059      */
14060     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
14061       return timeRange_;
14062     }
14063     /**
14064      * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
14065      */
14066     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
14067       return timeRange_;
14068     }
14069 
14070     // optional uint32 max_versions = 7 [default = 1];
14071     public static final int MAX_VERSIONS_FIELD_NUMBER = 7;
14072     private int maxVersions_;
14073     /**
14074      * <code>optional uint32 max_versions = 7 [default = 1];</code>
14075      */
14076     public boolean hasMaxVersions() {
14077       return ((bitField0_ & 0x00000010) == 0x00000010);
14078     }
14079     /**
14080      * <code>optional uint32 max_versions = 7 [default = 1];</code>
14081      */
14082     public int getMaxVersions() {
14083       return maxVersions_;
14084     }
14085 
14086     // optional bool cache_blocks = 8 [default = true];
14087     public static final int CACHE_BLOCKS_FIELD_NUMBER = 8;
14088     private boolean cacheBlocks_;
14089     /**
14090      * <code>optional bool cache_blocks = 8 [default = true];</code>
14091      */
14092     public boolean hasCacheBlocks() {
14093       return ((bitField0_ & 0x00000020) == 0x00000020);
14094     }
14095     /**
14096      * <code>optional bool cache_blocks = 8 [default = true];</code>
14097      */
14098     public boolean getCacheBlocks() {
14099       return cacheBlocks_;
14100     }
14101 
14102     // optional uint32 batch_size = 9;
14103     public static final int BATCH_SIZE_FIELD_NUMBER = 9;
14104     private int batchSize_;
14105     /**
14106      * <code>optional uint32 batch_size = 9;</code>
14107      */
14108     public boolean hasBatchSize() {
14109       return ((bitField0_ & 0x00000040) == 0x00000040);
14110     }
14111     /**
14112      * <code>optional uint32 batch_size = 9;</code>
14113      */
14114     public int getBatchSize() {
14115       return batchSize_;
14116     }
14117 
14118     // optional uint64 max_result_size = 10;
14119     public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10;
14120     private long maxResultSize_;
14121     /**
14122      * <code>optional uint64 max_result_size = 10;</code>
14123      */
14124     public boolean hasMaxResultSize() {
14125       return ((bitField0_ & 0x00000080) == 0x00000080);
14126     }
14127     /**
14128      * <code>optional uint64 max_result_size = 10;</code>
14129      */
14130     public long getMaxResultSize() {
14131       return maxResultSize_;
14132     }
14133 
14134     // optional uint32 store_limit = 11;
14135     public static final int STORE_LIMIT_FIELD_NUMBER = 11;
14136     private int storeLimit_;
14137     /**
14138      * <code>optional uint32 store_limit = 11;</code>
14139      */
14140     public boolean hasStoreLimit() {
14141       return ((bitField0_ & 0x00000100) == 0x00000100);
14142     }
14143     /**
14144      * <code>optional uint32 store_limit = 11;</code>
14145      */
14146     public int getStoreLimit() {
14147       return storeLimit_;
14148     }
14149 
14150     // optional uint32 store_offset = 12;
14151     public static final int STORE_OFFSET_FIELD_NUMBER = 12;
14152     private int storeOffset_;
14153     /**
14154      * <code>optional uint32 store_offset = 12;</code>
14155      */
14156     public boolean hasStoreOffset() {
14157       return ((bitField0_ & 0x00000200) == 0x00000200);
14158     }
14159     /**
14160      * <code>optional uint32 store_offset = 12;</code>
14161      */
14162     public int getStoreOffset() {
14163       return storeOffset_;
14164     }
14165 
14166     // optional bool load_column_families_on_demand = 13;
14167     public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13;
14168     private boolean loadColumnFamiliesOnDemand_;
14169     /**
14170      * <code>optional bool load_column_families_on_demand = 13;</code>
14171      *
14172      * <pre>
14173      * DO NOT add defaults to load_column_families_on_demand. 
14174      * </pre>
14175      */
14176     public boolean hasLoadColumnFamiliesOnDemand() {
14177       return ((bitField0_ & 0x00000400) == 0x00000400);
14178     }
14179     /**
14180      * <code>optional bool load_column_families_on_demand = 13;</code>
14181      *
14182      * <pre>
14183      * DO NOT add defaults to load_column_families_on_demand. 
14184      * </pre>
14185      */
14186     public boolean getLoadColumnFamiliesOnDemand() {
14187       return loadColumnFamiliesOnDemand_;
14188     }
14189 
14190     // optional bool small = 14;
14191     public static final int SMALL_FIELD_NUMBER = 14;
14192     private boolean small_;
14193     /**
14194      * <code>optional bool small = 14;</code>
14195      */
14196     public boolean hasSmall() {
14197       return ((bitField0_ & 0x00000800) == 0x00000800);
14198     }
14199     /**
14200      * <code>optional bool small = 14;</code>
14201      */
14202     public boolean getSmall() {
14203       return small_;
14204     }
14205 
14206     // optional bool reversed = 15 [default = false];
14207     public static final int REVERSED_FIELD_NUMBER = 15;
14208     private boolean reversed_;
14209     /**
14210      * <code>optional bool reversed = 15 [default = false];</code>
14211      */
14212     public boolean hasReversed() {
14213       return ((bitField0_ & 0x00001000) == 0x00001000);
14214     }
14215     /**
14216      * <code>optional bool reversed = 15 [default = false];</code>
14217      */
14218     public boolean getReversed() {
14219       return reversed_;
14220     }
14221 
14222     // optional .hbase.pb.Consistency consistency = 16 [default = STRONG];
14223     public static final int CONSISTENCY_FIELD_NUMBER = 16;
14224     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
14225     /**
14226      * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
14227      */
14228     public boolean hasConsistency() {
14229       return ((bitField0_ & 0x00002000) == 0x00002000);
14230     }
14231     /**
14232      * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
14233      */
14234     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
14235       return consistency_;
14236     }
14237 
14238     // optional uint32 caching = 17;
14239     public static final int CACHING_FIELD_NUMBER = 17;
14240     private int caching_;
14241     /**
14242      * <code>optional uint32 caching = 17;</code>
14243      */
14244     public boolean hasCaching() {
14245       return ((bitField0_ & 0x00004000) == 0x00004000);
14246     }
14247     /**
14248      * <code>optional uint32 caching = 17;</code>
14249      */
14250     public int getCaching() {
14251       return caching_;
14252     }
14253 
14254     // optional bool allow_partial_results = 18;
14255     public static final int ALLOW_PARTIAL_RESULTS_FIELD_NUMBER = 18;
14256     private boolean allowPartialResults_;
14257     /**
14258      * <code>optional bool allow_partial_results = 18;</code>
14259      */
14260     public boolean hasAllowPartialResults() {
14261       return ((bitField0_ & 0x00008000) == 0x00008000);
14262     }
14263     /**
14264      * <code>optional bool allow_partial_results = 18;</code>
14265      */
14266     public boolean getAllowPartialResults() {
14267       return allowPartialResults_;
14268     }
14269 
14270     private void initFields() {
14271       column_ = java.util.Collections.emptyList();
14272       attribute_ = java.util.Collections.emptyList();
14273       startRow_ = com.google.protobuf.ByteString.EMPTY;
14274       stopRow_ = com.google.protobuf.ByteString.EMPTY;
14275       filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
14276       timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
14277       maxVersions_ = 1;
14278       cacheBlocks_ = true;
14279       batchSize_ = 0;
14280       maxResultSize_ = 0L;
14281       storeLimit_ = 0;
14282       storeOffset_ = 0;
14283       loadColumnFamiliesOnDemand_ = false;
14284       small_ = false;
14285       reversed_ = false;
14286       consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
14287       caching_ = 0;
14288       allowPartialResults_ = false;
14289     }
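    // The values above mirror the .proto defaults: unannotated scalar fields get
    // protobuf's zero/false/empty defaults, while max_versions, cache_blocks and
    // consistency use their declared defaults of 1, true and STRONG respectively.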
14290     private byte memoizedIsInitialized = -1;
14291     public final boolean isInitialized() {
14292       byte isInitialized = memoizedIsInitialized;
14293       if (isInitialized != -1) return isInitialized == 1;
14294 
14295       for (int i = 0; i < getColumnCount(); i++) {
14296         if (!getColumn(i).isInitialized()) {
14297           memoizedIsInitialized = 0;
14298           return false;
14299         }
14300       }
14301       for (int i = 0; i < getAttributeCount(); i++) {
14302         if (!getAttribute(i).isInitialized()) {
14303           memoizedIsInitialized = 0;
14304           return false;
14305         }
14306       }
14307       if (hasFilter()) {
14308         if (!getFilter().isInitialized()) {
14309           memoizedIsInitialized = 0;
14310           return false;
14311         }
14312       }
14313       memoizedIsInitialized = 1;
14314       return true;
14315     }
14316 
14317     public void writeTo(com.google.protobuf.CodedOutputStream output)
14318                         throws java.io.IOException {
14319       getSerializedSize();
14320       for (int i = 0; i < column_.size(); i++) {
14321         output.writeMessage(1, column_.get(i));
14322       }
14323       for (int i = 0; i < attribute_.size(); i++) {
14324         output.writeMessage(2, attribute_.get(i));
14325       }
14326       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14327         output.writeBytes(3, startRow_);
14328       }
14329       if (((bitField0_ & 0x00000002) == 0x00000002)) {
14330         output.writeBytes(4, stopRow_);
14331       }
14332       if (((bitField0_ & 0x00000004) == 0x00000004)) {
14333         output.writeMessage(5, filter_);
14334       }
14335       if (((bitField0_ & 0x00000008) == 0x00000008)) {
14336         output.writeMessage(6, timeRange_);
14337       }
14338       if (((bitField0_ & 0x00000010) == 0x00000010)) {
14339         output.writeUInt32(7, maxVersions_);
14340       }
14341       if (((bitField0_ & 0x00000020) == 0x00000020)) {
14342         output.writeBool(8, cacheBlocks_);
14343       }
14344       if (((bitField0_ & 0x00000040) == 0x00000040)) {
14345         output.writeUInt32(9, batchSize_);
14346       }
14347       if (((bitField0_ & 0x00000080) == 0x00000080)) {
14348         output.writeUInt64(10, maxResultSize_);
14349       }
14350       if (((bitField0_ & 0x00000100) == 0x00000100)) {
14351         output.writeUInt32(11, storeLimit_);
14352       }
14353       if (((bitField0_ & 0x00000200) == 0x00000200)) {
14354         output.writeUInt32(12, storeOffset_);
14355       }
14356       if (((bitField0_ & 0x00000400) == 0x00000400)) {
14357         output.writeBool(13, loadColumnFamiliesOnDemand_);
14358       }
14359       if (((bitField0_ & 0x00000800) == 0x00000800)) {
14360         output.writeBool(14, small_);
14361       }
14362       if (((bitField0_ & 0x00001000) == 0x00001000)) {
14363         output.writeBool(15, reversed_);
14364       }
14365       if (((bitField0_ & 0x00002000) == 0x00002000)) {
14366         output.writeEnum(16, consistency_.getNumber());
14367       }
14368       if (((bitField0_ & 0x00004000) == 0x00004000)) {
14369         output.writeUInt32(17, caching_);
14370       }
14371       if (((bitField0_ & 0x00008000) == 0x00008000)) {
14372         output.writeBool(18, allowPartialResults_);
14373       }
14374       getUnknownFields().writeTo(output);
14375     }
14376 
14377     private int memoizedSerializedSize = -1;
14378     public int getSerializedSize() {
14379       int size = memoizedSerializedSize;
14380       if (size != -1) return size;
14381 
14382       size = 0;
14383       for (int i = 0; i < column_.size(); i++) {
14384         size += com.google.protobuf.CodedOutputStream
14385           .computeMessageSize(1, column_.get(i));
14386       }
14387       for (int i = 0; i < attribute_.size(); i++) {
14388         size += com.google.protobuf.CodedOutputStream
14389           .computeMessageSize(2, attribute_.get(i));
14390       }
14391       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14392         size += com.google.protobuf.CodedOutputStream
14393           .computeBytesSize(3, startRow_);
14394       }
14395       if (((bitField0_ & 0x00000002) == 0x00000002)) {
14396         size += com.google.protobuf.CodedOutputStream
14397           .computeBytesSize(4, stopRow_);
14398       }
14399       if (((bitField0_ & 0x00000004) == 0x00000004)) {
14400         size += com.google.protobuf.CodedOutputStream
14401           .computeMessageSize(5, filter_);
14402       }
14403       if (((bitField0_ & 0x00000008) == 0x00000008)) {
14404         size += com.google.protobuf.CodedOutputStream
14405           .computeMessageSize(6, timeRange_);
14406       }
14407       if (((bitField0_ & 0x00000010) == 0x00000010)) {
14408         size += com.google.protobuf.CodedOutputStream
14409           .computeUInt32Size(7, maxVersions_);
14410       }
14411       if (((bitField0_ & 0x00000020) == 0x00000020)) {
14412         size += com.google.protobuf.CodedOutputStream
14413           .computeBoolSize(8, cacheBlocks_);
14414       }
14415       if (((bitField0_ & 0x00000040) == 0x00000040)) {
14416         size += com.google.protobuf.CodedOutputStream
14417           .computeUInt32Size(9, batchSize_);
14418       }
14419       if (((bitField0_ & 0x00000080) == 0x00000080)) {
14420         size += com.google.protobuf.CodedOutputStream
14421           .computeUInt64Size(10, maxResultSize_);
14422       }
14423       if (((bitField0_ & 0x00000100) == 0x00000100)) {
14424         size += com.google.protobuf.CodedOutputStream
14425           .computeUInt32Size(11, storeLimit_);
14426       }
14427       if (((bitField0_ & 0x00000200) == 0x00000200)) {
14428         size += com.google.protobuf.CodedOutputStream
14429           .computeUInt32Size(12, storeOffset_);
14430       }
14431       if (((bitField0_ & 0x00000400) == 0x00000400)) {
14432         size += com.google.protobuf.CodedOutputStream
14433           .computeBoolSize(13, loadColumnFamiliesOnDemand_);
14434       }
14435       if (((bitField0_ & 0x00000800) == 0x00000800)) {
14436         size += com.google.protobuf.CodedOutputStream
14437           .computeBoolSize(14, small_);
14438       }
14439       if (((bitField0_ & 0x00001000) == 0x00001000)) {
14440         size += com.google.protobuf.CodedOutputStream
14441           .computeBoolSize(15, reversed_);
14442       }
14443       if (((bitField0_ & 0x00002000) == 0x00002000)) {
14444         size += com.google.protobuf.CodedOutputStream
14445           .computeEnumSize(16, consistency_.getNumber());
14446       }
14447       if (((bitField0_ & 0x00004000) == 0x00004000)) {
14448         size += com.google.protobuf.CodedOutputStream
14449           .computeUInt32Size(17, caching_);
14450       }
14451       if (((bitField0_ & 0x00008000) == 0x00008000)) {
14452         size += com.google.protobuf.CodedOutputStream
14453           .computeBoolSize(18, allowPartialResults_);
14454       }
14455       size += getUnknownFields().getSerializedSize();
14456       memoizedSerializedSize = size;
14457       return size;
14458     }
14459 
14460     private static final long serialVersionUID = 0L;
14461     @java.lang.Override
14462     protected java.lang.Object writeReplace()
14463         throws java.io.ObjectStreamException {
14464       return super.writeReplace();
14465     }
14466 
14467     @java.lang.Override
14468     public boolean equals(final java.lang.Object obj) {
14469       if (obj == this) {
14470         return true;
14471       }
14472       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) {
14473         return super.equals(obj);
14474       }
14475       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj;
14476 
14477       boolean result = true;
14478       result = result && getColumnList()
14479           .equals(other.getColumnList());
14480       result = result && getAttributeList()
14481           .equals(other.getAttributeList());
14482       result = result && (hasStartRow() == other.hasStartRow());
14483       if (hasStartRow()) {
14484         result = result && getStartRow()
14485             .equals(other.getStartRow());
14486       }
14487       result = result && (hasStopRow() == other.hasStopRow());
14488       if (hasStopRow()) {
14489         result = result && getStopRow()
14490             .equals(other.getStopRow());
14491       }
14492       result = result && (hasFilter() == other.hasFilter());
14493       if (hasFilter()) {
14494         result = result && getFilter()
14495             .equals(other.getFilter());
14496       }
14497       result = result && (hasTimeRange() == other.hasTimeRange());
14498       if (hasTimeRange()) {
14499         result = result && getTimeRange()
14500             .equals(other.getTimeRange());
14501       }
14502       result = result && (hasMaxVersions() == other.hasMaxVersions());
14503       if (hasMaxVersions()) {
14504         result = result && (getMaxVersions()
14505             == other.getMaxVersions());
14506       }
14507       result = result && (hasCacheBlocks() == other.hasCacheBlocks());
14508       if (hasCacheBlocks()) {
14509         result = result && (getCacheBlocks()
14510             == other.getCacheBlocks());
14511       }
14512       result = result && (hasBatchSize() == other.hasBatchSize());
14513       if (hasBatchSize()) {
14514         result = result && (getBatchSize()
14515             == other.getBatchSize());
14516       }
14517       result = result && (hasMaxResultSize() == other.hasMaxResultSize());
14518       if (hasMaxResultSize()) {
14519         result = result && (getMaxResultSize()
14520             == other.getMaxResultSize());
14521       }
14522       result = result && (hasStoreLimit() == other.hasStoreLimit());
14523       if (hasStoreLimit()) {
14524         result = result && (getStoreLimit()
14525             == other.getStoreLimit());
14526       }
14527       result = result && (hasStoreOffset() == other.hasStoreOffset());
14528       if (hasStoreOffset()) {
14529         result = result && (getStoreOffset()
14530             == other.getStoreOffset());
14531       }
14532       result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand());
14533       if (hasLoadColumnFamiliesOnDemand()) {
14534         result = result && (getLoadColumnFamiliesOnDemand()
14535             == other.getLoadColumnFamiliesOnDemand());
14536       }
14537       result = result && (hasSmall() == other.hasSmall());
14538       if (hasSmall()) {
14539         result = result && (getSmall()
14540             == other.getSmall());
14541       }
14542       result = result && (hasReversed() == other.hasReversed());
14543       if (hasReversed()) {
14544         result = result && (getReversed()
14545             == other.getReversed());
14546       }
14547       result = result && (hasConsistency() == other.hasConsistency());
14548       if (hasConsistency()) {
14549         result = result &&
14550             (getConsistency() == other.getConsistency());
14551       }
14552       result = result && (hasCaching() == other.hasCaching());
14553       if (hasCaching()) {
14554         result = result && (getCaching()
14555             == other.getCaching());
14556       }
14557       result = result && (hasAllowPartialResults() == other.hasAllowPartialResults());
14558       if (hasAllowPartialResults()) {
14559         result = result && (getAllowPartialResults()
14560             == other.getAllowPartialResults());
14561       }
14562       result = result &&
14563           getUnknownFields().equals(other.getUnknownFields());
14564       return result;
14565     }
14566 
14567     private int memoizedHashCode = 0;
14568     @java.lang.Override
14569     public int hashCode() {
14570       if (memoizedHashCode != 0) {
14571         return memoizedHashCode;
14572       }
14573       int hash = 41;
14574       hash = (19 * hash) + getDescriptorForType().hashCode();
14575       if (getColumnCount() > 0) {
14576         hash = (37 * hash) + COLUMN_FIELD_NUMBER;
14577         hash = (53 * hash) + getColumnList().hashCode();
14578       }
14579       if (getAttributeCount() > 0) {
14580         hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
14581         hash = (53 * hash) + getAttributeList().hashCode();
14582       }
14583       if (hasStartRow()) {
14584         hash = (37 * hash) + START_ROW_FIELD_NUMBER;
14585         hash = (53 * hash) + getStartRow().hashCode();
14586       }
14587       if (hasStopRow()) {
14588         hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
14589         hash = (53 * hash) + getStopRow().hashCode();
14590       }
14591       if (hasFilter()) {
14592         hash = (37 * hash) + FILTER_FIELD_NUMBER;
14593         hash = (53 * hash) + getFilter().hashCode();
14594       }
14595       if (hasTimeRange()) {
14596         hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
14597         hash = (53 * hash) + getTimeRange().hashCode();
14598       }
14599       if (hasMaxVersions()) {
14600         hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
14601         hash = (53 * hash) + getMaxVersions();
14602       }
14603       if (hasCacheBlocks()) {
14604         hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
14605         hash = (53 * hash) + hashBoolean(getCacheBlocks());
14606       }
14607       if (hasBatchSize()) {
14608         hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER;
14609         hash = (53 * hash) + getBatchSize();
14610       }
14611       if (hasMaxResultSize()) {
14612         hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER;
14613         hash = (53 * hash) + hashLong(getMaxResultSize());
14614       }
14615       if (hasStoreLimit()) {
14616         hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
14617         hash = (53 * hash) + getStoreLimit();
14618       }
14619       if (hasStoreOffset()) {
14620         hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
14621         hash = (53 * hash) + getStoreOffset();
14622       }
14623       if (hasLoadColumnFamiliesOnDemand()) {
14624         hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER;
14625         hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand());
14626       }
14627       if (hasSmall()) {
14628         hash = (37 * hash) + SMALL_FIELD_NUMBER;
14629         hash = (53 * hash) + hashBoolean(getSmall());
14630       }
14631       if (hasReversed()) {
14632         hash = (37 * hash) + REVERSED_FIELD_NUMBER;
14633         hash = (53 * hash) + hashBoolean(getReversed());
14634       }
14635       if (hasConsistency()) {
14636         hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
14637         hash = (53 * hash) + hashEnum(getConsistency());
14638       }
14639       if (hasCaching()) {
14640         hash = (37 * hash) + CACHING_FIELD_NUMBER;
14641         hash = (53 * hash) + getCaching();
14642       }
14643       if (hasAllowPartialResults()) {
14644         hash = (37 * hash) + ALLOW_PARTIAL_RESULTS_FIELD_NUMBER;
14645         hash = (53 * hash) + hashBoolean(getAllowPartialResults());
14646       }
14647       hash = (29 * hash) + getUnknownFields().hashCode();
14648       memoizedHashCode = hash;
14649       return hash;
14650     }
14651 
14652     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
14653         com.google.protobuf.ByteString data)
14654         throws com.google.protobuf.InvalidProtocolBufferException {
14655       return PARSER.parseFrom(data);
14656     }
14657     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
14658         com.google.protobuf.ByteString data,
14659         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14660         throws com.google.protobuf.InvalidProtocolBufferException {
14661       return PARSER.parseFrom(data, extensionRegistry);
14662     }
14663     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data)
14664         throws com.google.protobuf.InvalidProtocolBufferException {
14665       return PARSER.parseFrom(data);
14666     }
14667     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
14668         byte[] data,
14669         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14670         throws com.google.protobuf.InvalidProtocolBufferException {
14671       return PARSER.parseFrom(data, extensionRegistry);
14672     }
14673     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input)
14674         throws java.io.IOException {
14675       return PARSER.parseFrom(input);
14676     }
14677     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
14678         java.io.InputStream input,
14679         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14680         throws java.io.IOException {
14681       return PARSER.parseFrom(input, extensionRegistry);
14682     }
14683     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input)
14684         throws java.io.IOException {
14685       return PARSER.parseDelimitedFrom(input);
14686     }
14687     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(
14688         java.io.InputStream input,
14689         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14690         throws java.io.IOException {
14691       return PARSER.parseDelimitedFrom(input, extensionRegistry);
14692     }
14693     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
14694         com.google.protobuf.CodedInputStream input)
14695         throws java.io.IOException {
14696       return PARSER.parseFrom(input);
14697     }
14698     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
14699         com.google.protobuf.CodedInputStream input,
14700         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14701         throws java.io.IOException {
14702       return PARSER.parseFrom(input, extensionRegistry);
14703     }
14704 
14705     public static Builder newBuilder() { return Builder.create(); }
14706     public Builder newBuilderForType() { return newBuilder(); }
14707     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) {
14708       return newBuilder().mergeFrom(prototype);
14709     }
14710     public Builder toBuilder() { return newBuilder(this); }
14711 
14712     @java.lang.Override
14713     protected Builder newBuilderForType(
14714         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14715       Builder builder = new Builder(parent);
14716       return builder;
14717     }
14718     /**
14719      * Protobuf type {@code hbase.pb.Scan}
14720      *
14721      * <pre>
14722      **
14723      * Instead of a Get on a table, you can scan it with optional filters.
14724      * You can specify the row key range, the time range, the columns/families
14725      * to scan, and so on.
14726      *
14727      * This Scan message is used in the first request of a scan. The response to
14728      * the initial scan returns a scanner id, which should be used to fetch
14729      * result batches until the scanner is closed.
14730      * </pre>
14731      */
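    /*
     * A minimal usage sketch (not part of the generated source) of the scan
     * protocol described above: build the Scan for the opening request with
     * this Builder, then round-trip it through the parseFrom overloads defined
     * earlier in this class. The row keys and caching value are hypothetical.
     *
     *   import com.google.protobuf.ByteString;
     *   import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan;
     *
     *   Scan scan = Scan.newBuilder()
     *       .setStartRow(ByteString.copyFromUtf8("row-aaa"))   // hypothetical start key
     *       .setStopRow(ByteString.copyFromUtf8("row-zzz"))    // hypothetical stop key
     *       .setMaxVersions(1)
     *       .setCacheBlocks(true)
     *       .setCaching(100)
     *       .build();
     *   // Serialize and parse back, e.g. as the RPC layer would on the wire.
     *   Scan copy = Scan.parseFrom(scan.toByteString());
     */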
14732     public static final class Builder extends
14733         com.google.protobuf.GeneratedMessage.Builder<Builder>
14734        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder {
14735       public static final com.google.protobuf.Descriptors.Descriptor
14736           getDescriptor() {
14737         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor;
14738       }
14739 
14740       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14741           internalGetFieldAccessorTable() {
14742         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable
14743             .ensureFieldAccessorsInitialized(
14744                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
14745       }
14746 
14747       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder()
14748       private Builder() {
14749         maybeForceBuilderInitialization();
14750       }
14751 
14752       private Builder(
14753           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14754         super(parent);
14755         maybeForceBuilderInitialization();
14756       }
14757       private void maybeForceBuilderInitialization() {
14758         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
14759           getColumnFieldBuilder();
14760           getAttributeFieldBuilder();
14761           getFilterFieldBuilder();
14762           getTimeRangeFieldBuilder();
14763         }
14764       }
14765       private static Builder create() {
14766         return new Builder();
14767       }
14768 
14769       public Builder clear() {
14770         super.clear();
14771         if (columnBuilder_ == null) {
14772           column_ = java.util.Collections.emptyList();
14773           bitField0_ = (bitField0_ & ~0x00000001);
14774         } else {
14775           columnBuilder_.clear();
14776         }
14777         if (attributeBuilder_ == null) {
14778           attribute_ = java.util.Collections.emptyList();
14779           bitField0_ = (bitField0_ & ~0x00000002);
14780         } else {
14781           attributeBuilder_.clear();
14782         }
14783         startRow_ = com.google.protobuf.ByteString.EMPTY;
14784         bitField0_ = (bitField0_ & ~0x00000004);
14785         stopRow_ = com.google.protobuf.ByteString.EMPTY;
14786         bitField0_ = (bitField0_ & ~0x00000008);
14787         if (filterBuilder_ == null) {
14788           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
14789         } else {
14790           filterBuilder_.clear();
14791         }
14792         bitField0_ = (bitField0_ & ~0x00000010);
14793         if (timeRangeBuilder_ == null) {
14794           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
14795         } else {
14796           timeRangeBuilder_.clear();
14797         }
14798         bitField0_ = (bitField0_ & ~0x00000020);
14799         maxVersions_ = 1;
14800         bitField0_ = (bitField0_ & ~0x00000040);
14801         cacheBlocks_ = true;
14802         bitField0_ = (bitField0_ & ~0x00000080);
14803         batchSize_ = 0;
14804         bitField0_ = (bitField0_ & ~0x00000100);
14805         maxResultSize_ = 0L;
14806         bitField0_ = (bitField0_ & ~0x00000200);
14807         storeLimit_ = 0;
14808         bitField0_ = (bitField0_ & ~0x00000400);
14809         storeOffset_ = 0;
14810         bitField0_ = (bitField0_ & ~0x00000800);
14811         loadColumnFamiliesOnDemand_ = false;
14812         bitField0_ = (bitField0_ & ~0x00001000);
14813         small_ = false;
14814         bitField0_ = (bitField0_ & ~0x00002000);
14815         reversed_ = false;
14816         bitField0_ = (bitField0_ & ~0x00004000);
14817         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
14818         bitField0_ = (bitField0_ & ~0x00008000);
14819         caching_ = 0;
14820         bitField0_ = (bitField0_ & ~0x00010000);
14821         allowPartialResults_ = false;
14822         bitField0_ = (bitField0_ & ~0x00020000);
14823         return this;
14824       }
14825 
14826       public Builder clone() {
14827         return create().mergeFrom(buildPartial());
14828       }
14829 
14830       public com.google.protobuf.Descriptors.Descriptor
14831           getDescriptorForType() {
14832         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor;
14833       }
14834 
14835       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() {
14836         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
14837       }
14838 
14839       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() {
14840         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial();
14841         if (!result.isInitialized()) {
14842           throw newUninitializedMessageException(result);
14843         }
14844         return result;
14845       }
14846 
14847       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() {
14848         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this);
14849         int from_bitField0_ = bitField0_;
14850         int to_bitField0_ = 0;
14851         if (columnBuilder_ == null) {
14852           if (((bitField0_ & 0x00000001) == 0x00000001)) {
14853             column_ = java.util.Collections.unmodifiableList(column_);
14854             bitField0_ = (bitField0_ & ~0x00000001);
14855           }
14856           result.column_ = column_;
14857         } else {
14858           result.column_ = columnBuilder_.build();
14859         }
14860         if (attributeBuilder_ == null) {
14861           if (((bitField0_ & 0x00000002) == 0x00000002)) {
14862             attribute_ = java.util.Collections.unmodifiableList(attribute_);
14863             bitField0_ = (bitField0_ & ~0x00000002);
14864           }
14865           result.attribute_ = attribute_;
14866         } else {
14867           result.attribute_ = attributeBuilder_.build();
14868         }
14869         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
14870           to_bitField0_ |= 0x00000001;
14871         }
14872         result.startRow_ = startRow_;
14873         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
14874           to_bitField0_ |= 0x00000002;
14875         }
14876         result.stopRow_ = stopRow_;
14877         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
14878           to_bitField0_ |= 0x00000004;
14879         }
14880         if (filterBuilder_ == null) {
14881           result.filter_ = filter_;
14882         } else {
14883           result.filter_ = filterBuilder_.build();
14884         }
14885         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
14886           to_bitField0_ |= 0x00000008;
14887         }
14888         if (timeRangeBuilder_ == null) {
14889           result.timeRange_ = timeRange_;
14890         } else {
14891           result.timeRange_ = timeRangeBuilder_.build();
14892         }
14893         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
14894           to_bitField0_ |= 0x00000010;
14895         }
14896         result.maxVersions_ = maxVersions_;
14897         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
14898           to_bitField0_ |= 0x00000020;
14899         }
14900         result.cacheBlocks_ = cacheBlocks_;
14901         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
14902           to_bitField0_ |= 0x00000040;
14903         }
14904         result.batchSize_ = batchSize_;
14905         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
14906           to_bitField0_ |= 0x00000080;
14907         }
14908         result.maxResultSize_ = maxResultSize_;
14909         if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
14910           to_bitField0_ |= 0x00000100;
14911         }
14912         result.storeLimit_ = storeLimit_;
14913         if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
14914           to_bitField0_ |= 0x00000200;
14915         }
14916         result.storeOffset_ = storeOffset_;
14917         if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
14918           to_bitField0_ |= 0x00000400;
14919         }
14920         result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_;
14921         if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
14922           to_bitField0_ |= 0x00000800;
14923         }
14924         result.small_ = small_;
14925         if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
14926           to_bitField0_ |= 0x00001000;
14927         }
14928         result.reversed_ = reversed_;
14929         if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
14930           to_bitField0_ |= 0x00002000;
14931         }
14932         result.consistency_ = consistency_;
14933         if (((from_bitField0_ & 0x00010000) == 0x00010000)) {
14934           to_bitField0_ |= 0x00004000;
14935         }
14936         result.caching_ = caching_;
14937         if (((from_bitField0_ & 0x00020000) == 0x00020000)) {
14938           to_bitField0_ |= 0x00008000;
14939         }
14940         result.allowPartialResults_ = allowPartialResults_;
14941         result.bitField0_ = to_bitField0_;
14942         onBuilt();
14943         return result;
14944       }
14945 
14946       public Builder mergeFrom(com.google.protobuf.Message other) {
14947         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) {
14948           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other);
14949         } else {
14950           super.mergeFrom(other);
14951           return this;
14952         }
14953       }
14954 
14955       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) {
14956         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this;
14957         if (columnBuilder_ == null) {
14958           if (!other.column_.isEmpty()) {
14959             if (column_.isEmpty()) {
14960               column_ = other.column_;
14961               bitField0_ = (bitField0_ & ~0x00000001);
14962             } else {
14963               ensureColumnIsMutable();
14964               column_.addAll(other.column_);
14965             }
14966             onChanged();
14967           }
14968         } else {
14969           if (!other.column_.isEmpty()) {
14970             if (columnBuilder_.isEmpty()) {
14971               columnBuilder_.dispose();
14972               columnBuilder_ = null;
14973               column_ = other.column_;
14974               bitField0_ = (bitField0_ & ~0x00000001);
14975               columnBuilder_ = 
14976                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
14977                    getColumnFieldBuilder() : null;
14978             } else {
14979               columnBuilder_.addAllMessages(other.column_);
14980             }
14981           }
14982         }
14983         if (attributeBuilder_ == null) {
14984           if (!other.attribute_.isEmpty()) {
14985             if (attribute_.isEmpty()) {
14986               attribute_ = other.attribute_;
14987               bitField0_ = (bitField0_ & ~0x00000002);
14988             } else {
14989               ensureAttributeIsMutable();
14990               attribute_.addAll(other.attribute_);
14991             }
14992             onChanged();
14993           }
14994         } else {
14995           if (!other.attribute_.isEmpty()) {
14996             if (attributeBuilder_.isEmpty()) {
14997               attributeBuilder_.dispose();
14998               attributeBuilder_ = null;
14999               attribute_ = other.attribute_;
15000               bitField0_ = (bitField0_ & ~0x00000002);
15001               attributeBuilder_ = 
15002                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
15003                    getAttributeFieldBuilder() : null;
15004             } else {
15005               attributeBuilder_.addAllMessages(other.attribute_);
15006             }
15007           }
15008         }
15009         if (other.hasStartRow()) {
15010           setStartRow(other.getStartRow());
15011         }
15012         if (other.hasStopRow()) {
15013           setStopRow(other.getStopRow());
15014         }
15015         if (other.hasFilter()) {
15016           mergeFilter(other.getFilter());
15017         }
15018         if (other.hasTimeRange()) {
15019           mergeTimeRange(other.getTimeRange());
15020         }
15021         if (other.hasMaxVersions()) {
15022           setMaxVersions(other.getMaxVersions());
15023         }
15024         if (other.hasCacheBlocks()) {
15025           setCacheBlocks(other.getCacheBlocks());
15026         }
15027         if (other.hasBatchSize()) {
15028           setBatchSize(other.getBatchSize());
15029         }
15030         if (other.hasMaxResultSize()) {
15031           setMaxResultSize(other.getMaxResultSize());
15032         }
15033         if (other.hasStoreLimit()) {
15034           setStoreLimit(other.getStoreLimit());
15035         }
15036         if (other.hasStoreOffset()) {
15037           setStoreOffset(other.getStoreOffset());
15038         }
15039         if (other.hasLoadColumnFamiliesOnDemand()) {
15040           setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand());
15041         }
15042         if (other.hasSmall()) {
15043           setSmall(other.getSmall());
15044         }
15045         if (other.hasReversed()) {
15046           setReversed(other.getReversed());
15047         }
15048         if (other.hasConsistency()) {
15049           setConsistency(other.getConsistency());
15050         }
15051         if (other.hasCaching()) {
15052           setCaching(other.getCaching());
15053         }
15054         if (other.hasAllowPartialResults()) {
15055           setAllowPartialResults(other.getAllowPartialResults());
15056         }
15057         this.mergeUnknownFields(other.getUnknownFields());
15058         return this;
15059       }
15060 
15061       public final boolean isInitialized() {
15062         for (int i = 0; i < getColumnCount(); i++) {
15063           if (!getColumn(i).isInitialized()) {
15064             
15065             return false;
15066           }
15067         }
15068         for (int i = 0; i < getAttributeCount(); i++) {
15069           if (!getAttribute(i).isInitialized()) {
15070             
15071             return false;
15072           }
15073         }
15074         if (hasFilter()) {
15075           if (!getFilter().isInitialized()) {
15076             
15077             return false;
15078           }
15079         }
15080         return true;
15081       }
15082 
15083       public Builder mergeFrom(
15084           com.google.protobuf.CodedInputStream input,
15085           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15086           throws java.io.IOException {
15087         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parsedMessage = null;
15088         try {
15089           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15090         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15091           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage();
15092           throw e;
15093         } finally {
15094           if (parsedMessage != null) {
15095             mergeFrom(parsedMessage);
15096           }
15097         }
15098         return this;
15099       }
15100       private int bitField0_;
15101 
15102       // repeated .hbase.pb.Column column = 1;
15103       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
15104         java.util.Collections.emptyList();
15105       private void ensureColumnIsMutable() {
15106         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
15107           column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
15108           bitField0_ |= 0x00000001;
15109          }
15110       }
15111 
15112       private com.google.protobuf.RepeatedFieldBuilder<
15113           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;
15114 
15115       /**
15116        * <code>repeated .hbase.pb.Column column = 1;</code>
15117        */
15118       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
15119         if (columnBuilder_ == null) {
15120           return java.util.Collections.unmodifiableList(column_);
15121         } else {
15122           return columnBuilder_.getMessageList();
15123         }
15124       }
15125       /**
15126        * <code>repeated .hbase.pb.Column column = 1;</code>
15127        */
15128       public int getColumnCount() {
15129         if (columnBuilder_ == null) {
15130           return column_.size();
15131         } else {
15132           return columnBuilder_.getCount();
15133         }
15134       }
15135       /**
15136        * <code>repeated .hbase.pb.Column column = 1;</code>
15137        */
15138       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
15139         if (columnBuilder_ == null) {
15140           return column_.get(index);
15141         } else {
15142           return columnBuilder_.getMessage(index);
15143         }
15144       }
15145       /**
15146        * <code>repeated .hbase.pb.Column column = 1;</code>
15147        */
15148       public Builder setColumn(
15149           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
15150         if (columnBuilder_ == null) {
15151           if (value == null) {
15152             throw new NullPointerException();
15153           }
15154           ensureColumnIsMutable();
15155           column_.set(index, value);
15156           onChanged();
15157         } else {
15158           columnBuilder_.setMessage(index, value);
15159         }
15160         return this;
15161       }
15162       /**
15163        * <code>repeated .hbase.pb.Column column = 1;</code>
15164        */
15165       public Builder setColumn(
15166           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
15167         if (columnBuilder_ == null) {
15168           ensureColumnIsMutable();
15169           column_.set(index, builderForValue.build());
15170           onChanged();
15171         } else {
15172           columnBuilder_.setMessage(index, builderForValue.build());
15173         }
15174         return this;
15175       }
15176       /**
15177        * <code>repeated .hbase.pb.Column column = 1;</code>
15178        */
15179       public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
15180         if (columnBuilder_ == null) {
15181           if (value == null) {
15182             throw new NullPointerException();
15183           }
15184           ensureColumnIsMutable();
15185           column_.add(value);
15186           onChanged();
15187         } else {
15188           columnBuilder_.addMessage(value);
15189         }
15190         return this;
15191       }
15192       /**
15193        * <code>repeated .hbase.pb.Column column = 1;</code>
15194        */
15195       public Builder addColumn(
15196           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
15197         if (columnBuilder_ == null) {
15198           if (value == null) {
15199             throw new NullPointerException();
15200           }
15201           ensureColumnIsMutable();
15202           column_.add(index, value);
15203           onChanged();
15204         } else {
15205           columnBuilder_.addMessage(index, value);
15206         }
15207         return this;
15208       }
15209       /**
15210        * <code>repeated .hbase.pb.Column column = 1;</code>
15211        */
15212       public Builder addColumn(
15213           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
15214         if (columnBuilder_ == null) {
15215           ensureColumnIsMutable();
15216           column_.add(builderForValue.build());
15217           onChanged();
15218         } else {
15219           columnBuilder_.addMessage(builderForValue.build());
15220         }
15221         return this;
15222       }
15223       /**
15224        * <code>repeated .hbase.pb.Column column = 1;</code>
15225        */
15226       public Builder addColumn(
15227           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
15228         if (columnBuilder_ == null) {
15229           ensureColumnIsMutable();
15230           column_.add(index, builderForValue.build());
15231           onChanged();
15232         } else {
15233           columnBuilder_.addMessage(index, builderForValue.build());
15234         }
15235         return this;
15236       }
15237       /**
15238        * <code>repeated .hbase.pb.Column column = 1;</code>
15239        */
15240       public Builder addAllColumn(
15241           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
15242         if (columnBuilder_ == null) {
15243           ensureColumnIsMutable();
15244           super.addAll(values, column_);
15245           onChanged();
15246         } else {
15247           columnBuilder_.addAllMessages(values);
15248         }
15249         return this;
15250       }
15251       /**
15252        * <code>repeated .hbase.pb.Column column = 1;</code>
15253        */
15254       public Builder clearColumn() {
15255         if (columnBuilder_ == null) {
15256           column_ = java.util.Collections.emptyList();
15257           bitField0_ = (bitField0_ & ~0x00000001);
15258           onChanged();
15259         } else {
15260           columnBuilder_.clear();
15261         }
15262         return this;
15263       }
15264       /**
15265        * <code>repeated .hbase.pb.Column column = 1;</code>
15266        */
15267       public Builder removeColumn(int index) {
15268         if (columnBuilder_ == null) {
15269           ensureColumnIsMutable();
15270           column_.remove(index);
15271           onChanged();
15272         } else {
15273           columnBuilder_.remove(index);
15274         }
15275         return this;
15276       }
15277       /**
15278        * <code>repeated .hbase.pb.Column column = 1;</code>
15279        */
15280       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
15281           int index) {
15282         return getColumnFieldBuilder().getBuilder(index);
15283       }
15284       /**
15285        * <code>repeated .hbase.pb.Column column = 1;</code>
15286        */
15287       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
15288           int index) {
15289         if (columnBuilder_ == null) {
15290           return column_.get(index);  } else {
15291           return columnBuilder_.getMessageOrBuilder(index);
15292         }
15293       }
15294       /**
15295        * <code>repeated .hbase.pb.Column column = 1;</code>
15296        */
15297       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
15298            getColumnOrBuilderList() {
15299         if (columnBuilder_ != null) {
15300           return columnBuilder_.getMessageOrBuilderList();
15301         } else {
15302           return java.util.Collections.unmodifiableList(column_);
15303         }
15304       }
15305       /**
15306        * <code>repeated .hbase.pb.Column column = 1;</code>
15307        */
15308       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
15309         return getColumnFieldBuilder().addBuilder(
15310             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
15311       }
15312       /**
15313        * <code>repeated .hbase.pb.Column column = 1;</code>
15314        */
15315       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
15316           int index) {
15317         return getColumnFieldBuilder().addBuilder(
15318             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
15319       }
15320       /**
15321        * <code>repeated .hbase.pb.Column column = 1;</code>
15322        */
15323       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder> 
15324            getColumnBuilderList() {
15325         return getColumnFieldBuilder().getBuilderList();
15326       }
15327       private com.google.protobuf.RepeatedFieldBuilder<
15328           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
15329           getColumnFieldBuilder() {
15330         if (columnBuilder_ == null) {
15331           columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
15332               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
15333                   column_,
15334                   ((bitField0_ & 0x00000001) == 0x00000001),
15335                   getParentForChildren(),
15336                   isClean());
15337           column_ = null;
15338         }
15339         return columnBuilder_;
15340       }
15341 
15342       // repeated .hbase.pb.NameBytesPair attribute = 2;
15343       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
15344         java.util.Collections.emptyList();
15345       private void ensureAttributeIsMutable() {
15346         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
15347           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
15348           bitField0_ |= 0x00000002;
15349          }
15350       }
15351 
15352       private com.google.protobuf.RepeatedFieldBuilder<
15353           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
15354 
15355       /**
15356        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15357        */
15358       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
15359         if (attributeBuilder_ == null) {
15360           return java.util.Collections.unmodifiableList(attribute_);
15361         } else {
15362           return attributeBuilder_.getMessageList();
15363         }
15364       }
15365       /**
15366        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15367        */
15368       public int getAttributeCount() {
15369         if (attributeBuilder_ == null) {
15370           return attribute_.size();
15371         } else {
15372           return attributeBuilder_.getCount();
15373         }
15374       }
15375       /**
15376        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15377        */
15378       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
15379         if (attributeBuilder_ == null) {
15380           return attribute_.get(index);
15381         } else {
15382           return attributeBuilder_.getMessage(index);
15383         }
15384       }
15385       /**
15386        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15387        */
15388       public Builder setAttribute(
15389           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
15390         if (attributeBuilder_ == null) {
15391           if (value == null) {
15392             throw new NullPointerException();
15393           }
15394           ensureAttributeIsMutable();
15395           attribute_.set(index, value);
15396           onChanged();
15397         } else {
15398           attributeBuilder_.setMessage(index, value);
15399         }
15400         return this;
15401       }
15402       /**
15403        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15404        */
15405       public Builder setAttribute(
15406           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
15407         if (attributeBuilder_ == null) {
15408           ensureAttributeIsMutable();
15409           attribute_.set(index, builderForValue.build());
15410           onChanged();
15411         } else {
15412           attributeBuilder_.setMessage(index, builderForValue.build());
15413         }
15414         return this;
15415       }
15416       /**
15417        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15418        */
15419       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
15420         if (attributeBuilder_ == null) {
15421           if (value == null) {
15422             throw new NullPointerException();
15423           }
15424           ensureAttributeIsMutable();
15425           attribute_.add(value);
15426           onChanged();
15427         } else {
15428           attributeBuilder_.addMessage(value);
15429         }
15430         return this;
15431       }
15432       /**
15433        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15434        */
15435       public Builder addAttribute(
15436           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
15437         if (attributeBuilder_ == null) {
15438           if (value == null) {
15439             throw new NullPointerException();
15440           }
15441           ensureAttributeIsMutable();
15442           attribute_.add(index, value);
15443           onChanged();
15444         } else {
15445           attributeBuilder_.addMessage(index, value);
15446         }
15447         return this;
15448       }
15449       /**
15450        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15451        */
15452       public Builder addAttribute(
15453           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
15454         if (attributeBuilder_ == null) {
15455           ensureAttributeIsMutable();
15456           attribute_.add(builderForValue.build());
15457           onChanged();
15458         } else {
15459           attributeBuilder_.addMessage(builderForValue.build());
15460         }
15461         return this;
15462       }
15463       /**
15464        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15465        */
15466       public Builder addAttribute(
15467           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
15468         if (attributeBuilder_ == null) {
15469           ensureAttributeIsMutable();
15470           attribute_.add(index, builderForValue.build());
15471           onChanged();
15472         } else {
15473           attributeBuilder_.addMessage(index, builderForValue.build());
15474         }
15475         return this;
15476       }
15477       /**
15478        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15479        */
15480       public Builder addAllAttribute(
15481           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
15482         if (attributeBuilder_ == null) {
15483           ensureAttributeIsMutable();
15484           super.addAll(values, attribute_);
15485           onChanged();
15486         } else {
15487           attributeBuilder_.addAllMessages(values);
15488         }
15489         return this;
15490       }
15491       /**
15492        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15493        */
15494       public Builder clearAttribute() {
15495         if (attributeBuilder_ == null) {
15496           attribute_ = java.util.Collections.emptyList();
15497           bitField0_ = (bitField0_ & ~0x00000002);
15498           onChanged();
15499         } else {
15500           attributeBuilder_.clear();
15501         }
15502         return this;
15503       }
15504       /**
15505        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15506        */
15507       public Builder removeAttribute(int index) {
15508         if (attributeBuilder_ == null) {
15509           ensureAttributeIsMutable();
15510           attribute_.remove(index);
15511           onChanged();
15512         } else {
15513           attributeBuilder_.remove(index);
15514         }
15515         return this;
15516       }
15517       /**
15518        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15519        */
15520       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
15521           int index) {
15522         return getAttributeFieldBuilder().getBuilder(index);
15523       }
15524       /**
15525        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15526        */
15527       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
15528           int index) {
15529         if (attributeBuilder_ == null) {
15530           return attribute_.get(index);  } else {
15531           return attributeBuilder_.getMessageOrBuilder(index);
15532         }
15533       }
15534       /**
15535        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15536        */
15537       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
15538            getAttributeOrBuilderList() {
15539         if (attributeBuilder_ != null) {
15540           return attributeBuilder_.getMessageOrBuilderList();
15541         } else {
15542           return java.util.Collections.unmodifiableList(attribute_);
15543         }
15544       }
15545       /**
15546        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15547        */
15548       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
15549         return getAttributeFieldBuilder().addBuilder(
15550             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
15551       }
15552       /**
15553        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15554        */
15555       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
15556           int index) {
15557         return getAttributeFieldBuilder().addBuilder(
15558             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
15559       }
15560       /**
15561        * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
15562        */
15563       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> 
15564            getAttributeBuilderList() {
15565         return getAttributeFieldBuilder().getBuilderList();
15566       }
15567       private com.google.protobuf.RepeatedFieldBuilder<
15568           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
15569           getAttributeFieldBuilder() {
15570         if (attributeBuilder_ == null) {
15571           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
15572               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
15573                   attribute_,
15574                   ((bitField0_ & 0x00000002) == 0x00000002),
15575                   getParentForChildren(),
15576                   isClean());
15577           attribute_ = null;
15578         }
15579         return attributeBuilder_;
15580       }
15581 
15582       // optional bytes start_row = 3;
15583       private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
15584       /**
15585        * <code>optional bytes start_row = 3;</code>
15586        */
15587       public boolean hasStartRow() {
15588         return ((bitField0_ & 0x00000004) == 0x00000004);
15589       }
15590       /**
15591        * <code>optional bytes start_row = 3;</code>
15592        */
15593       public com.google.protobuf.ByteString getStartRow() {
15594         return startRow_;
15595       }
15596       /**
15597        * <code>optional bytes start_row = 3;</code>
15598        */
15599       public Builder setStartRow(com.google.protobuf.ByteString value) {
15600         if (value == null) {
15601           throw new NullPointerException();
15602         }
15603         bitField0_ |= 0x00000004;
15604         startRow_ = value;
15605         onChanged();
15606         return this;
15607       }
15608       /**
15609        * <code>optional bytes start_row = 3;</code>
15610        */
15611       public Builder clearStartRow() {
15612         bitField0_ = (bitField0_ & ~0x00000004);
15613         startRow_ = getDefaultInstance().getStartRow();
15614         onChanged();
15615         return this;
15616       }
15617 
15618       // optional bytes stop_row = 4;
15619       private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
15620       /**
15621        * <code>optional bytes stop_row = 4;</code>
15622        */
15623       public boolean hasStopRow() {
15624         return ((bitField0_ & 0x00000008) == 0x00000008);
15625       }
15626       /**
15627        * <code>optional bytes stop_row = 4;</code>
15628        */
15629       public com.google.protobuf.ByteString getStopRow() {
15630         return stopRow_;
15631       }
15632       /**
15633        * <code>optional bytes stop_row = 4;</code>
15634        */
15635       public Builder setStopRow(com.google.protobuf.ByteString value) {
15636         if (value == null) {
15637           throw new NullPointerException();
15638         }
15639         bitField0_ |= 0x00000008;
15640         stopRow_ = value;
15641         onChanged();
15642         return this;
15643       }
15644       /**
15645        * <code>optional bytes stop_row = 4;</code>
15646        */
15647       public Builder clearStopRow() {
15648         bitField0_ = (bitField0_ & ~0x00000008);
15649         stopRow_ = getDefaultInstance().getStopRow();
15650         onChanged();
15651         return this;
15652       }
15653 
15654       // optional .hbase.pb.Filter filter = 5;
15655       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15656       private com.google.protobuf.SingleFieldBuilder<
15657           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
15658       /**
15659        * <code>optional .hbase.pb.Filter filter = 5;</code>
15660        */
15661       public boolean hasFilter() {
15662         return ((bitField0_ & 0x00000010) == 0x00000010);
15663       }
15664       /**
15665        * <code>optional .hbase.pb.Filter filter = 5;</code>
15666        */
15667       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
15668         if (filterBuilder_ == null) {
15669           return filter_;
15670         } else {
15671           return filterBuilder_.getMessage();
15672         }
15673       }
15674       /**
15675        * <code>optional .hbase.pb.Filter filter = 5;</code>
15676        */
15677       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
15678         if (filterBuilder_ == null) {
15679           if (value == null) {
15680             throw new NullPointerException();
15681           }
15682           filter_ = value;
15683           onChanged();
15684         } else {
15685           filterBuilder_.setMessage(value);
15686         }
15687         bitField0_ |= 0x00000010;
15688         return this;
15689       }
15690       /**
15691        * <code>optional .hbase.pb.Filter filter = 5;</code>
15692        */
15693       public Builder setFilter(
15694           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
15695         if (filterBuilder_ == null) {
15696           filter_ = builderForValue.build();
15697           onChanged();
15698         } else {
15699           filterBuilder_.setMessage(builderForValue.build());
15700         }
15701         bitField0_ |= 0x00000010;
15702         return this;
15703       }
15704       /**
15705        * <code>optional .hbase.pb.Filter filter = 5;</code>
15706        */
15707       public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
15708         if (filterBuilder_ == null) {
15709           if (((bitField0_ & 0x00000010) == 0x00000010) &&
15710               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
15711             filter_ =
15712               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
15713           } else {
15714             filter_ = value;
15715           }
15716           onChanged();
15717         } else {
15718           filterBuilder_.mergeFrom(value);
15719         }
15720         bitField0_ |= 0x00000010;
15721         return this;
15722       }
15723       /**
15724        * <code>optional .hbase.pb.Filter filter = 5;</code>
15725        */
15726       public Builder clearFilter() {
15727         if (filterBuilder_ == null) {
15728           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15729           onChanged();
15730         } else {
15731           filterBuilder_.clear();
15732         }
15733         bitField0_ = (bitField0_ & ~0x00000010);
15734         return this;
15735       }
15736       /**
15737        * <code>optional .hbase.pb.Filter filter = 5;</code>
15738        */
15739       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
15740         bitField0_ |= 0x00000010;
15741         onChanged();
15742         return getFilterFieldBuilder().getBuilder();
15743       }
15744       /**
15745        * <code>optional .hbase.pb.Filter filter = 5;</code>
15746        */
15747       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
15748         if (filterBuilder_ != null) {
15749           return filterBuilder_.getMessageOrBuilder();
15750         } else {
15751           return filter_;
15752         }
15753       }
15754       /**
15755        * <code>optional .hbase.pb.Filter filter = 5;</code>
15756        */
15757       private com.google.protobuf.SingleFieldBuilder<
15758           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
15759           getFilterFieldBuilder() {
15760         if (filterBuilder_ == null) {
15761           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
15762               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
15763                   filter_,
15764                   getParentForChildren(),
15765                   isClean());
15766           filter_ = null;
15767         }
15768         return filterBuilder_;
15769       }
15770 
15771       // optional .hbase.pb.TimeRange time_range = 6;
15772       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
15773       private com.google.protobuf.SingleFieldBuilder<
15774           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
15775       /**
15776        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15777        */
15778       public boolean hasTimeRange() {
15779         return ((bitField0_ & 0x00000020) == 0x00000020);
15780       }
15781       /**
15782        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15783        */
15784       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
15785         if (timeRangeBuilder_ == null) {
15786           return timeRange_;
15787         } else {
15788           return timeRangeBuilder_.getMessage();
15789         }
15790       }
15791       /**
15792        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15793        */
15794       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
15795         if (timeRangeBuilder_ == null) {
15796           if (value == null) {
15797             throw new NullPointerException();
15798           }
15799           timeRange_ = value;
15800           onChanged();
15801         } else {
15802           timeRangeBuilder_.setMessage(value);
15803         }
15804         bitField0_ |= 0x00000020;
15805         return this;
15806       }
15807       /**
15808        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15809        */
15810       public Builder setTimeRange(
15811           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
15812         if (timeRangeBuilder_ == null) {
15813           timeRange_ = builderForValue.build();
15814           onChanged();
15815         } else {
15816           timeRangeBuilder_.setMessage(builderForValue.build());
15817         }
15818         bitField0_ |= 0x00000020;
15819         return this;
15820       }
15821       /**
15822        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15823        */
15824       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
15825         if (timeRangeBuilder_ == null) {
15826           if (((bitField0_ & 0x00000020) == 0x00000020) &&
15827               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
15828             timeRange_ =
15829               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
15830           } else {
15831             timeRange_ = value;
15832           }
15833           onChanged();
15834         } else {
15835           timeRangeBuilder_.mergeFrom(value);
15836         }
15837         bitField0_ |= 0x00000020;
15838         return this;
15839       }
15840       /**
15841        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15842        */
15843       public Builder clearTimeRange() {
15844         if (timeRangeBuilder_ == null) {
15845           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
15846           onChanged();
15847         } else {
15848           timeRangeBuilder_.clear();
15849         }
15850         bitField0_ = (bitField0_ & ~0x00000020);
15851         return this;
15852       }
15853       /**
15854        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15855        */
15856       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
15857         bitField0_ |= 0x00000020;
15858         onChanged();
15859         return getTimeRangeFieldBuilder().getBuilder();
15860       }
15861       /**
15862        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15863        */
15864       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
15865         if (timeRangeBuilder_ != null) {
15866           return timeRangeBuilder_.getMessageOrBuilder();
15867         } else {
15868           return timeRange_;
15869         }
15870       }
15871       /**
15872        * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
15873        */
15874       private com.google.protobuf.SingleFieldBuilder<
15875           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> 
15876           getTimeRangeFieldBuilder() {
15877         if (timeRangeBuilder_ == null) {
15878           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
15879               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
15880                   timeRange_,
15881                   getParentForChildren(),
15882                   isClean());
15883           timeRange_ = null;
15884         }
15885         return timeRangeBuilder_;
15886       }
15887 
15888       // optional uint32 max_versions = 7 [default = 1];
15889       private int maxVersions_ = 1;
15890       /**
15891        * <code>optional uint32 max_versions = 7 [default = 1];</code>
15892        */
15893       public boolean hasMaxVersions() {
15894         return ((bitField0_ & 0x00000040) == 0x00000040);
15895       }
15896       /**
15897        * <code>optional uint32 max_versions = 7 [default = 1];</code>
15898        */
15899       public int getMaxVersions() {
15900         return maxVersions_;
15901       }
15902       /**
15903        * <code>optional uint32 max_versions = 7 [default = 1];</code>
15904        */
15905       public Builder setMaxVersions(int value) {
15906         bitField0_ |= 0x00000040;
15907         maxVersions_ = value;
15908         onChanged();
15909         return this;
15910       }
15911       /**
15912        * <code>optional uint32 max_versions = 7 [default = 1];</code>
15913        */
15914       public Builder clearMaxVersions() {
15915         bitField0_ = (bitField0_ & ~0x00000040);
15916         maxVersions_ = 1;
15917         onChanged();
15918         return this;
15919       }
15920 
15921       // optional bool cache_blocks = 8 [default = true];
15922       private boolean cacheBlocks_ = true;
15923       /**
15924        * <code>optional bool cache_blocks = 8 [default = true];</code>
15925        */
15926       public boolean hasCacheBlocks() {
15927         return ((bitField0_ & 0x00000080) == 0x00000080);
15928       }
15929       /**
15930        * <code>optional bool cache_blocks = 8 [default = true];</code>
15931        */
15932       public boolean getCacheBlocks() {
15933         return cacheBlocks_;
15934       }
15935       /**
15936        * <code>optional bool cache_blocks = 8 [default = true];</code>
15937        */
15938       public Builder setCacheBlocks(boolean value) {
15939         bitField0_ |= 0x00000080;
15940         cacheBlocks_ = value;
15941         onChanged();
15942         return this;
15943       }
15944       /**
15945        * <code>optional bool cache_blocks = 8 [default = true];</code>
15946        */
15947       public Builder clearCacheBlocks() {
15948         bitField0_ = (bitField0_ & ~0x00000080);
15949         cacheBlocks_ = true;
15950         onChanged();
15951         return this;
15952       }
15953 
15954       // optional uint32 batch_size = 9;
15955       private int batchSize_ ;
15956       /**
15957        * <code>optional uint32 batch_size = 9;</code>
15958        */
15959       public boolean hasBatchSize() {
15960         return ((bitField0_ & 0x00000100) == 0x00000100);
15961       }
15962       /**
15963        * <code>optional uint32 batch_size = 9;</code>
15964        */
15965       public int getBatchSize() {
15966         return batchSize_;
15967       }
15968       /**
15969        * <code>optional uint32 batch_size = 9;</code>
15970        */
15971       public Builder setBatchSize(int value) {
15972         bitField0_ |= 0x00000100;
15973         batchSize_ = value;
15974         onChanged();
15975         return this;
15976       }
15977       /**
15978        * <code>optional uint32 batch_size = 9;</code>
15979        */
15980       public Builder clearBatchSize() {
15981         bitField0_ = (bitField0_ & ~0x00000100);
15982         batchSize_ = 0;
15983         onChanged();
15984         return this;
15985       }
15986 
15987       // optional uint64 max_result_size = 10;
15988       private long maxResultSize_ ;
15989       /**
15990        * <code>optional uint64 max_result_size = 10;</code>
15991        */
15992       public boolean hasMaxResultSize() {
15993         return ((bitField0_ & 0x00000200) == 0x00000200);
15994       }
15995       /**
15996        * <code>optional uint64 max_result_size = 10;</code>
15997        */
15998       public long getMaxResultSize() {
15999         return maxResultSize_;
16000       }
16001       /**
16002        * <code>optional uint64 max_result_size = 10;</code>
16003        */
16004       public Builder setMaxResultSize(long value) {
16005         bitField0_ |= 0x00000200;
16006         maxResultSize_ = value;
16007         onChanged();
16008         return this;
16009       }
16010       /**
16011        * <code>optional uint64 max_result_size = 10;</code>
16012        */
16013       public Builder clearMaxResultSize() {
16014         bitField0_ = (bitField0_ & ~0x00000200);
16015         maxResultSize_ = 0L;
16016         onChanged();
16017         return this;
16018       }
16019 
16020       // optional uint32 store_limit = 11;
16021       private int storeLimit_ ;
16022       /**
16023        * <code>optional uint32 store_limit = 11;</code>
16024        */
16025       public boolean hasStoreLimit() {
16026         return ((bitField0_ & 0x00000400) == 0x00000400);
16027       }
16028       /**
16029        * <code>optional uint32 store_limit = 11;</code>
16030        */
16031       public int getStoreLimit() {
16032         return storeLimit_;
16033       }
16034       /**
16035        * <code>optional uint32 store_limit = 11;</code>
16036        */
16037       public Builder setStoreLimit(int value) {
16038         bitField0_ |= 0x00000400;
16039         storeLimit_ = value;
16040         onChanged();
16041         return this;
16042       }
16043       /**
16044        * <code>optional uint32 store_limit = 11;</code>
16045        */
16046       public Builder clearStoreLimit() {
16047         bitField0_ = (bitField0_ & ~0x00000400);
16048         storeLimit_ = 0;
16049         onChanged();
16050         return this;
16051       }
16052 
16053       // optional uint32 store_offset = 12;
16054       private int storeOffset_ ;
16055       /**
16056        * <code>optional uint32 store_offset = 12;</code>
16057        */
16058       public boolean hasStoreOffset() {
16059         return ((bitField0_ & 0x00000800) == 0x00000800);
16060       }
16061       /**
16062        * <code>optional uint32 store_offset = 12;</code>
16063        */
16064       public int getStoreOffset() {
16065         return storeOffset_;
16066       }
16067       /**
16068        * <code>optional uint32 store_offset = 12;</code>
16069        */
16070       public Builder setStoreOffset(int value) {
16071         bitField0_ |= 0x00000800;
16072         storeOffset_ = value;
16073         onChanged();
16074         return this;
16075       }
16076       /**
16077        * <code>optional uint32 store_offset = 12;</code>
16078        */
16079       public Builder clearStoreOffset() {
16080         bitField0_ = (bitField0_ & ~0x00000800);
16081         storeOffset_ = 0;
16082         onChanged();
16083         return this;
16084       }
16085 
16086       // optional bool load_column_families_on_demand = 13;
16087       private boolean loadColumnFamiliesOnDemand_ ;
16088       /**
16089        * <code>optional bool load_column_families_on_demand = 13;</code>
16090        *
16091        * <pre>
16092        * DO NOT add defaults to load_column_families_on_demand. 
16093        * </pre>
16094        */
16095       public boolean hasLoadColumnFamiliesOnDemand() {
16096         return ((bitField0_ & 0x00001000) == 0x00001000);
16097       }
16098       /**
16099        * <code>optional bool load_column_families_on_demand = 13;</code>
16100        *
16101        * <pre>
16102        * DO NOT add defaults to load_column_families_on_demand. 
16103        * </pre>
16104        */
16105       public boolean getLoadColumnFamiliesOnDemand() {
16106         return loadColumnFamiliesOnDemand_;
16107       }
16108       /**
16109        * <code>optional bool load_column_families_on_demand = 13;</code>
16110        *
16111        * <pre>
16112        * DO NOT add defaults to load_column_families_on_demand. 
16113        * </pre>
16114        */
16115       public Builder setLoadColumnFamiliesOnDemand(boolean value) {
16116         bitField0_ |= 0x00001000;
16117         loadColumnFamiliesOnDemand_ = value;
16118         onChanged();
16119         return this;
16120       }
16121       /**
16122        * <code>optional bool load_column_families_on_demand = 13;</code>
16123        *
16124        * <pre>
16125        * DO NOT add defaults to load_column_families_on_demand. 
16126        * </pre>
16127        */
16128       public Builder clearLoadColumnFamiliesOnDemand() {
16129         bitField0_ = (bitField0_ & ~0x00001000);
16130         loadColumnFamiliesOnDemand_ = false;
16131         onChanged();
16132         return this;
16133       }
16134 
16135       // optional bool small = 14;
16136       private boolean small_ ;
16137       /**
16138        * <code>optional bool small = 14;</code>
16139        */
16140       public boolean hasSmall() {
16141         return ((bitField0_ & 0x00002000) == 0x00002000);
16142       }
16143       /**
16144        * <code>optional bool small = 14;</code>
16145        */
16146       public boolean getSmall() {
16147         return small_;
16148       }
16149       /**
16150        * <code>optional bool small = 14;</code>
16151        */
16152       public Builder setSmall(boolean value) {
16153         bitField0_ |= 0x00002000;
16154         small_ = value;
16155         onChanged();
16156         return this;
16157       }
16158       /**
16159        * <code>optional bool small = 14;</code>
16160        */
16161       public Builder clearSmall() {
16162         bitField0_ = (bitField0_ & ~0x00002000);
16163         small_ = false;
16164         onChanged();
16165         return this;
16166       }
16167 
16168       // optional bool reversed = 15 [default = false];
16169       private boolean reversed_ ;
16170       /**
16171        * <code>optional bool reversed = 15 [default = false];</code>
16172        */
16173       public boolean hasReversed() {
16174         return ((bitField0_ & 0x00004000) == 0x00004000);
16175       }
16176       /**
16177        * <code>optional bool reversed = 15 [default = false];</code>
16178        */
16179       public boolean getReversed() {
16180         return reversed_;
16181       }
16182       /**
16183        * <code>optional bool reversed = 15 [default = false];</code>
16184        */
16185       public Builder setReversed(boolean value) {
16186         bitField0_ |= 0x00004000;
16187         reversed_ = value;
16188         onChanged();
16189         return this;
16190       }
16191       /**
16192        * <code>optional bool reversed = 15 [default = false];</code>
16193        */
16194       public Builder clearReversed() {
16195         bitField0_ = (bitField0_ & ~0x00004000);
16196         reversed_ = false;
16197         onChanged();
16198         return this;
16199       }
16200 
16201       // optional .hbase.pb.Consistency consistency = 16 [default = STRONG];
16202       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
16203       /**
16204        * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
16205        */
16206       public boolean hasConsistency() {
16207         return ((bitField0_ & 0x00008000) == 0x00008000);
16208       }
16209       /**
16210        * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
16211        */
16212       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
16213         return consistency_;
16214       }
16215       /**
16216        * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
16217        */
16218       public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
16219         if (value == null) {
16220           throw new NullPointerException();
16221         }
16222         bitField0_ |= 0x00008000;
16223         consistency_ = value;
16224         onChanged();
16225         return this;
16226       }
16227       /**
16228        * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
16229        */
16230       public Builder clearConsistency() {
16231         bitField0_ = (bitField0_ & ~0x00008000);
16232         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
16233         onChanged();
16234         return this;
16235       }
16236 
16237       // optional uint32 caching = 17;
16238       private int caching_ ;
16239       /**
16240        * <code>optional uint32 caching = 17;</code>
16241        */
16242       public boolean hasCaching() {
16243         return ((bitField0_ & 0x00010000) == 0x00010000);
16244       }
16245       /**
16246        * <code>optional uint32 caching = 17;</code>
16247        */
16248       public int getCaching() {
16249         return caching_;
16250       }
16251       /**
16252        * <code>optional uint32 caching = 17;</code>
16253        */
16254       public Builder setCaching(int value) {
16255         bitField0_ |= 0x00010000;
16256         caching_ = value;
16257         onChanged();
16258         return this;
16259       }
16260       /**
16261        * <code>optional uint32 caching = 17;</code>
16262        */
16263       public Builder clearCaching() {
16264         bitField0_ = (bitField0_ & ~0x00010000);
16265         caching_ = 0;
16266         onChanged();
16267         return this;
16268       }
16269 
16270       // optional bool allow_partial_results = 18;
16271       private boolean allowPartialResults_ ;
16272       /**
16273        * <code>optional bool allow_partial_results = 18;</code>
16274        */
16275       public boolean hasAllowPartialResults() {
16276         return ((bitField0_ & 0x00020000) == 0x00020000);
16277       }
16278       /**
16279        * <code>optional bool allow_partial_results = 18;</code>
16280        */
16281       public boolean getAllowPartialResults() {
16282         return allowPartialResults_;
16283       }
16284       /**
16285        * <code>optional bool allow_partial_results = 18;</code>
16286        */
16287       public Builder setAllowPartialResults(boolean value) {
16288         bitField0_ |= 0x00020000;
16289         allowPartialResults_ = value;
16290         onChanged();
16291         return this;
16292       }
16293       /**
16294        * <code>optional bool allow_partial_results = 18;</code>
16295        */
16296       public Builder clearAllowPartialResults() {
16297         bitField0_ = (bitField0_ & ~0x00020000);
16298         allowPartialResults_ = false;
16299         onChanged();
16300         return this;
16301       }
16302 
16303       // @@protoc_insertion_point(builder_scope:hbase.pb.Scan)
16304     }
16305 
16306     static {
16307       defaultInstance = new Scan(true);
16308       defaultInstance.initFields();
16309     }
16310 
16311     // @@protoc_insertion_point(class_scope:hbase.pb.Scan)
16312   }
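  // Illustrative sketch (not part of the generated file): building a Scan with
  // the Builder setters shown above. Every setter records the field's presence
  // bit and returns the Builder, so calls chain; clearXxx() restores the proto
  // default (e.g. clearMaxVersions() puts back 1) and drops the presence bit.
  //
  //   ClientProtos.Scan scan = ClientProtos.Scan.newBuilder()
  //       .setMaxVersions(3)
  //       .setCacheBlocks(false)
  //       .setBatchSize(100)
  //       .setMaxResultSize(2 * 1024 * 1024L)
  //       .setReversed(true)
  //       .setConsistency(ClientProtos.Consistency.STRONG)
  //       .setCaching(100)
  //       .setAllowPartialResults(true)
  //       .build();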
16313 
16314   public interface ScanRequestOrBuilder
16315       extends com.google.protobuf.MessageOrBuilder {
16316 
16317     // optional .hbase.pb.RegionSpecifier region = 1;
16318     /**
16319      * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
16320      */
16321     boolean hasRegion();
16322     /**
16323      * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
16324      */
16325     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
16326     /**
16327      * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
16328      */
16329     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
16330 
16331     // optional .hbase.pb.Scan scan = 2;
16332     /**
16333      * <code>optional .hbase.pb.Scan scan = 2;</code>
16334      */
16335     boolean hasScan();
16336     /**
16337      * <code>optional .hbase.pb.Scan scan = 2;</code>
16338      */
16339     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
16340     /**
16341      * <code>optional .hbase.pb.Scan scan = 2;</code>
16342      */
16343     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
16344 
16345     // optional uint64 scanner_id = 3;
16346     /**
16347      * <code>optional uint64 scanner_id = 3;</code>
16348      */
16349     boolean hasScannerId();
16350     /**
16351      * <code>optional uint64 scanner_id = 3;</code>
16352      */
16353     long getScannerId();
16354 
16355     // optional uint32 number_of_rows = 4;
16356     /**
16357      * <code>optional uint32 number_of_rows = 4;</code>
16358      */
16359     boolean hasNumberOfRows();
16360     /**
16361      * <code>optional uint32 number_of_rows = 4;</code>
16362      */
16363     int getNumberOfRows();
16364 
16365     // optional bool close_scanner = 5;
16366     /**
16367      * <code>optional bool close_scanner = 5;</code>
16368      */
16369     boolean hasCloseScanner();
16370     /**
16371      * <code>optional bool close_scanner = 5;</code>
16372      */
16373     boolean getCloseScanner();
16374 
16375     // optional uint64 next_call_seq = 6;
16376     /**
16377      * <code>optional uint64 next_call_seq = 6;</code>
16378      */
16379     boolean hasNextCallSeq();
16380     /**
16381      * <code>optional uint64 next_call_seq = 6;</code>
16382      */
16383     long getNextCallSeq();
16384 
16385     // optional bool client_handles_partials = 7;
16386     /**
16387      * <code>optional bool client_handles_partials = 7;</code>
16388      */
16389     boolean hasClientHandlesPartials();
16390     /**
16391      * <code>optional bool client_handles_partials = 7;</code>
16392      */
16393     boolean getClientHandlesPartials();
16394 
16395     // optional bool client_handles_heartbeats = 8;
16396     /**
16397      * <code>optional bool client_handles_heartbeats = 8;</code>
16398      */
16399     boolean hasClientHandlesHeartbeats();
16400     /**
16401      * <code>optional bool client_handles_heartbeats = 8;</code>
16402      */
16403     boolean getClientHandlesHeartbeats();
16404 
16405     // optional bool track_scan_metrics = 9;
16406     /**
16407      * <code>optional bool track_scan_metrics = 9;</code>
16408      */
16409     boolean hasTrackScanMetrics();
16410     /**
16411      * <code>optional bool track_scan_metrics = 9;</code>
16412      */
16413     boolean getTrackScanMetrics();
16414   }
16415   /**
16416    * Protobuf type {@code hbase.pb.ScanRequest}
16417    *
16418    * <pre>
16419    **
16420    * A scan request. Initially, it should specify a scan. Later on, you
16421    * can use the scanner id returned to fetch result batches with a different
16422    * scan request.
16423    *
16424    * The scanner will remain open as long as there are more results and it has
16425    * not been explicitly asked to close.
16426    *
16427    * If you are not interested in the remaining results, you can fetch what you
16428    * need and ask for the scanner to be closed in the same request, saving a trip.
16429    * </pre>
16430    */
16431   public static final class ScanRequest extends
16432       com.google.protobuf.GeneratedMessage
16433       implements ScanRequestOrBuilder {
16434     // Use ScanRequest.newBuilder() to construct.
16435     private ScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
16436       super(builder);
16437       this.unknownFields = builder.getUnknownFields();
16438     }
16439     private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16440 
16441     private static final ScanRequest defaultInstance;
16442     public static ScanRequest getDefaultInstance() {
16443       return defaultInstance;
16444     }
16445 
16446     public ScanRequest getDefaultInstanceForType() {
16447       return defaultInstance;
16448     }
16449 
16450     private final com.google.protobuf.UnknownFieldSet unknownFields;
16451     @java.lang.Override
16452     public final com.google.protobuf.UnknownFieldSet
16453         getUnknownFields() {
16454       return this.unknownFields;
16455     }
16456     private ScanRequest(
16457         com.google.protobuf.CodedInputStream input,
16458         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16459         throws com.google.protobuf.InvalidProtocolBufferException {
16460       initFields();
16461       int mutable_bitField0_ = 0;
16462       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
16463           com.google.protobuf.UnknownFieldSet.newBuilder();
16464       try {
16465         boolean done = false;
16466         while (!done) {
16467           int tag = input.readTag();
16468           switch (tag) {
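            // A protobuf tag packs (field_number << 3) | wire_type: tag 10 is field 1
            // with wire type 2 (length-delimited), 24 is field 3 with wire type 0
            // (varint), and so on; a tag of 0 marks the end of the input.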
16469             case 0:
16470               done = true;
16471               break;
16472             default: {
16473               if (!parseUnknownField(input, unknownFields,
16474                                      extensionRegistry, tag)) {
16475                 done = true;
16476               }
16477               break;
16478             }
16479             case 10: {
16480               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
16481               if (((bitField0_ & 0x00000001) == 0x00000001)) {
16482                 subBuilder = region_.toBuilder();
16483               }
16484               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
16485               if (subBuilder != null) {
16486                 subBuilder.mergeFrom(region_);
16487                 region_ = subBuilder.buildPartial();
16488               }
16489               bitField0_ |= 0x00000001;
16490               break;
16491             }
16492             case 18: {
16493               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
16494               if (((bitField0_ & 0x00000002) == 0x00000002)) {
16495                 subBuilder = scan_.toBuilder();
16496               }
16497               scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
16498               if (subBuilder != null) {
16499                 subBuilder.mergeFrom(scan_);
16500                 scan_ = subBuilder.buildPartial();
16501               }
16502               bitField0_ |= 0x00000002;
16503               break;
16504             }
16505             case 24: {
16506               bitField0_ |= 0x00000004;
16507               scannerId_ = input.readUInt64();
16508               break;
16509             }
16510             case 32: {
16511               bitField0_ |= 0x00000008;
16512               numberOfRows_ = input.readUInt32();
16513               break;
16514             }
16515             case 40: {
16516               bitField0_ |= 0x00000010;
16517               closeScanner_ = input.readBool();
16518               break;
16519             }
16520             case 48: {
16521               bitField0_ |= 0x00000020;
16522               nextCallSeq_ = input.readUInt64();
16523               break;
16524             }
16525             case 56: {
16526               bitField0_ |= 0x00000040;
16527               clientHandlesPartials_ = input.readBool();
16528               break;
16529             }
16530             case 64: {
16531               bitField0_ |= 0x00000080;
16532               clientHandlesHeartbeats_ = input.readBool();
16533               break;
16534             }
16535             case 72: {
16536               bitField0_ |= 0x00000100;
16537               trackScanMetrics_ = input.readBool();
16538               break;
16539             }
16540           }
16541         }
16542       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16543         throw e.setUnfinishedMessage(this);
16544       } catch (java.io.IOException e) {
16545         throw new com.google.protobuf.InvalidProtocolBufferException(
16546             e.getMessage()).setUnfinishedMessage(this);
16547       } finally {
16548         this.unknownFields = unknownFields.build();
16549         makeExtensionsImmutable();
16550       }
16551     }
16552     public static final com.google.protobuf.Descriptors.Descriptor
16553         getDescriptor() {
16554       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor;
16555     }
16556 
16557     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16558         internalGetFieldAccessorTable() {
16559       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable
16560           .ensureFieldAccessorsInitialized(
16561               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
16562     }
16563 
16564     public static com.google.protobuf.Parser<ScanRequest> PARSER =
16565         new com.google.protobuf.AbstractParser<ScanRequest>() {
16566       public ScanRequest parsePartialFrom(
16567           com.google.protobuf.CodedInputStream input,
16568           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16569           throws com.google.protobuf.InvalidProtocolBufferException {
16570         return new ScanRequest(input, extensionRegistry);
16571       }
16572     };
16573 
16574     @java.lang.Override
16575     public com.google.protobuf.Parser<ScanRequest> getParserForType() {
16576       return PARSER;
16577     }
16578 
16579     private int bitField0_;
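    // Presence of each optional field is tracked by a dedicated bit in bitField0_;
    // the hasXxx() accessors below simply test the corresponding bit.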
16580     // optional .hbase.pb.RegionSpecifier region = 1;
16581     public static final int REGION_FIELD_NUMBER = 1;
16582     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
16583     /**
16584      * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
16585      */
16586     public boolean hasRegion() {
16587       return ((bitField0_ & 0x00000001) == 0x00000001);
16588     }
16589     /**
16590      * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
16591      */
16592     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
16593       return region_;
16594     }
16595     /**
16596      * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
16597      */
16598     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
16599       return region_;
16600     }
16601 
16602     // optional .hbase.pb.Scan scan = 2;
16603     public static final int SCAN_FIELD_NUMBER = 2;
16604     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
16605     /**
16606      * <code>optional .hbase.pb.Scan scan = 2;</code>
16607      */
16608     public boolean hasScan() {
16609       return ((bitField0_ & 0x00000002) == 0x00000002);
16610     }
16611     /**
16612      * <code>optional .hbase.pb.Scan scan = 2;</code>
16613      */
16614     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
16615       return scan_;
16616     }
16617     /**
16618      * <code>optional .hbase.pb.Scan scan = 2;</code>
16619      */
16620     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
16621       return scan_;
16622     }
16623 
16624     // optional uint64 scanner_id = 3;
16625     public static final int SCANNER_ID_FIELD_NUMBER = 3;
16626     private long scannerId_;
16627     /**
16628      * <code>optional uint64 scanner_id = 3;</code>
16629      */
16630     public boolean hasScannerId() {
16631       return ((bitField0_ & 0x00000004) == 0x00000004);
16632     }
16633     /**
16634      * <code>optional uint64 scanner_id = 3;</code>
16635      */
16636     public long getScannerId() {
16637       return scannerId_;
16638     }
16639 
16640     // optional uint32 number_of_rows = 4;
16641     public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4;
16642     private int numberOfRows_;
16643     /**
16644      * <code>optional uint32 number_of_rows = 4;</code>
16645      */
16646     public boolean hasNumberOfRows() {
16647       return ((bitField0_ & 0x00000008) == 0x00000008);
16648     }
16649     /**
16650      * <code>optional uint32 number_of_rows = 4;</code>
16651      */
16652     public int getNumberOfRows() {
16653       return numberOfRows_;
16654     }
16655 
16656     // optional bool close_scanner = 5;
16657     public static final int CLOSE_SCANNER_FIELD_NUMBER = 5;
16658     private boolean closeScanner_;
16659     /**
16660      * <code>optional bool close_scanner = 5;</code>
16661      */
16662     public boolean hasCloseScanner() {
16663       return ((bitField0_ & 0x00000010) == 0x00000010);
16664     }
16665     /**
16666      * <code>optional bool close_scanner = 5;</code>
16667      */
16668     public boolean getCloseScanner() {
16669       return closeScanner_;
16670     }
16671 
16672     // optional uint64 next_call_seq = 6;
16673     public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6;
16674     private long nextCallSeq_;
16675     /**
16676      * <code>optional uint64 next_call_seq = 6;</code>
16677      */
16678     public boolean hasNextCallSeq() {
16679       return ((bitField0_ & 0x00000020) == 0x00000020);
16680     }
16681     /**
16682      * <code>optional uint64 next_call_seq = 6;</code>
16683      */
16684     public long getNextCallSeq() {
16685       return nextCallSeq_;
16686     }
16687 
16688     // optional bool client_handles_partials = 7;
16689     public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7;
16690     private boolean clientHandlesPartials_;
16691     /**
16692      * <code>optional bool client_handles_partials = 7;</code>
16693      */
16694     public boolean hasClientHandlesPartials() {
16695       return ((bitField0_ & 0x00000040) == 0x00000040);
16696     }
16697     /**
16698      * <code>optional bool client_handles_partials = 7;</code>
16699      */
16700     public boolean getClientHandlesPartials() {
16701       return clientHandlesPartials_;
16702     }
16703 
16704     // optional bool client_handles_heartbeats = 8;
16705     public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8;
16706     private boolean clientHandlesHeartbeats_;
16707     /**
16708      * <code>optional bool client_handles_heartbeats = 8;</code>
16709      */
16710     public boolean hasClientHandlesHeartbeats() {
16711       return ((bitField0_ & 0x00000080) == 0x00000080);
16712     }
16713     /**
16714      * <code>optional bool client_handles_heartbeats = 8;</code>
16715      */
16716     public boolean getClientHandlesHeartbeats() {
16717       return clientHandlesHeartbeats_;
16718     }
16719 
16720     // optional bool track_scan_metrics = 9;
16721     public static final int TRACK_SCAN_METRICS_FIELD_NUMBER = 9;
16722     private boolean trackScanMetrics_;
16723     /**
16724      * <code>optional bool track_scan_metrics = 9;</code>
16725      */
16726     public boolean hasTrackScanMetrics() {
16727       return ((bitField0_ & 0x00000100) == 0x00000100);
16728     }
16729     /**
16730      * <code>optional bool track_scan_metrics = 9;</code>
16731      */
16732     public boolean getTrackScanMetrics() {
16733       return trackScanMetrics_;
16734     }
16735 
16736     private void initFields() {
16737       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
16738       scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
16739       scannerId_ = 0L;
16740       numberOfRows_ = 0;
16741       closeScanner_ = false;
16742       nextCallSeq_ = 0L;
16743       clientHandlesPartials_ = false;
16744       clientHandlesHeartbeats_ = false;
16745       trackScanMetrics_ = false;
16746     }
16747     private byte memoizedIsInitialized = -1;
16748     public final boolean isInitialized() {
16749       byte isInitialized = memoizedIsInitialized;
16750       if (isInitialized != -1) return isInitialized == 1;
16751 
16752       if (hasRegion()) {
16753         if (!getRegion().isInitialized()) {
16754           memoizedIsInitialized = 0;
16755           return false;
16756         }
16757       }
16758       if (hasScan()) {
16759         if (!getScan().isInitialized()) {
16760           memoizedIsInitialized = 0;
16761           return false;
16762         }
16763       }
16764       memoizedIsInitialized = 1;
16765       return true;
16766     }
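    // Note: region and scan are optional, so they only affect initialization when
    // present; a present nested message that is missing its own required fields
    // makes this ScanRequest uninitialized as well.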
16767 
16768     public void writeTo(com.google.protobuf.CodedOutputStream output)
16769                         throws java.io.IOException {
16770       getSerializedSize();
16771       if (((bitField0_ & 0x00000001) == 0x00000001)) {
16772         output.writeMessage(1, region_);
16773       }
16774       if (((bitField0_ & 0x00000002) == 0x00000002)) {
16775         output.writeMessage(2, scan_);
16776       }
16777       if (((bitField0_ & 0x00000004) == 0x00000004)) {
16778         output.writeUInt64(3, scannerId_);
16779       }
16780       if (((bitField0_ & 0x00000008) == 0x00000008)) {
16781         output.writeUInt32(4, numberOfRows_);
16782       }
16783       if (((bitField0_ & 0x00000010) == 0x00000010)) {
16784         output.writeBool(5, closeScanner_);
16785       }
16786       if (((bitField0_ & 0x00000020) == 0x00000020)) {
16787         output.writeUInt64(6, nextCallSeq_);
16788       }
16789       if (((bitField0_ & 0x00000040) == 0x00000040)) {
16790         output.writeBool(7, clientHandlesPartials_);
16791       }
16792       if (((bitField0_ & 0x00000080) == 0x00000080)) {
16793         output.writeBool(8, clientHandlesHeartbeats_);
16794       }
16795       if (((bitField0_ & 0x00000100) == 0x00000100)) {
16796         output.writeBool(9, trackScanMetrics_);
16797       }
16798       getUnknownFields().writeTo(output);
16799     }
16800 
16801     private int memoizedSerializedSize = -1;
16802     public int getSerializedSize() {
16803       int size = memoizedSerializedSize;
16804       if (size != -1) return size;
16805 
16806       size = 0;
16807       if (((bitField0_ & 0x00000001) == 0x00000001)) {
16808         size += com.google.protobuf.CodedOutputStream
16809           .computeMessageSize(1, region_);
16810       }
16811       if (((bitField0_ & 0x00000002) == 0x00000002)) {
16812         size += com.google.protobuf.CodedOutputStream
16813           .computeMessageSize(2, scan_);
16814       }
16815       if (((bitField0_ & 0x00000004) == 0x00000004)) {
16816         size += com.google.protobuf.CodedOutputStream
16817           .computeUInt64Size(3, scannerId_);
16818       }
16819       if (((bitField0_ & 0x00000008) == 0x00000008)) {
16820         size += com.google.protobuf.CodedOutputStream
16821           .computeUInt32Size(4, numberOfRows_);
16822       }
16823       if (((bitField0_ & 0x00000010) == 0x00000010)) {
16824         size += com.google.protobuf.CodedOutputStream
16825           .computeBoolSize(5, closeScanner_);
16826       }
16827       if (((bitField0_ & 0x00000020) == 0x00000020)) {
16828         size += com.google.protobuf.CodedOutputStream
16829           .computeUInt64Size(6, nextCallSeq_);
16830       }
16831       if (((bitField0_ & 0x00000040) == 0x00000040)) {
16832         size += com.google.protobuf.CodedOutputStream
16833           .computeBoolSize(7, clientHandlesPartials_);
16834       }
16835       if (((bitField0_ & 0x00000080) == 0x00000080)) {
16836         size += com.google.protobuf.CodedOutputStream
16837           .computeBoolSize(8, clientHandlesHeartbeats_);
16838       }
16839       if (((bitField0_ & 0x00000100) == 0x00000100)) {
16840         size += com.google.protobuf.CodedOutputStream
16841           .computeBoolSize(9, trackScanMetrics_);
16842       }
16843       size += getUnknownFields().getSerializedSize();
16844       memoizedSerializedSize = size;
16845       return size;
16846     }
16847 
16848     private static final long serialVersionUID = 0L;
16849     @java.lang.Override
16850     protected java.lang.Object writeReplace()
16851         throws java.io.ObjectStreamException {
16852       return super.writeReplace();
16853     }
16854 
16855     @java.lang.Override
16856     public boolean equals(final java.lang.Object obj) {
16857       if (obj == this) {
16858        return true;
16859       }
16860       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) {
16861         return super.equals(obj);
16862       }
16863       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj;
16864 
16865       boolean result = true;
16866       result = result && (hasRegion() == other.hasRegion());
16867       if (hasRegion()) {
16868         result = result && getRegion()
16869             .equals(other.getRegion());
16870       }
16871       result = result && (hasScan() == other.hasScan());
16872       if (hasScan()) {
16873         result = result && getScan()
16874             .equals(other.getScan());
16875       }
16876       result = result && (hasScannerId() == other.hasScannerId());
16877       if (hasScannerId()) {
16878         result = result && (getScannerId()
16879             == other.getScannerId());
16880       }
16881       result = result && (hasNumberOfRows() == other.hasNumberOfRows());
16882       if (hasNumberOfRows()) {
16883         result = result && (getNumberOfRows()
16884             == other.getNumberOfRows());
16885       }
16886       result = result && (hasCloseScanner() == other.hasCloseScanner());
16887       if (hasCloseScanner()) {
16888         result = result && (getCloseScanner()
16889             == other.getCloseScanner());
16890       }
16891       result = result && (hasNextCallSeq() == other.hasNextCallSeq());
16892       if (hasNextCallSeq()) {
16893         result = result && (getNextCallSeq()
16894             == other.getNextCallSeq());
16895       }
16896       result = result && (hasClientHandlesPartials() == other.hasClientHandlesPartials());
16897       if (hasClientHandlesPartials()) {
16898         result = result && (getClientHandlesPartials()
16899             == other.getClientHandlesPartials());
16900       }
16901       result = result && (hasClientHandlesHeartbeats() == other.hasClientHandlesHeartbeats());
16902       if (hasClientHandlesHeartbeats()) {
16903         result = result && (getClientHandlesHeartbeats()
16904             == other.getClientHandlesHeartbeats());
16905       }
16906       result = result && (hasTrackScanMetrics() == other.hasTrackScanMetrics());
16907       if (hasTrackScanMetrics()) {
16908         result = result && (getTrackScanMetrics()
16909             == other.getTrackScanMetrics());
16910       }
16911       result = result &&
16912           getUnknownFields().equals(other.getUnknownFields());
16913       return result;
16914     }
16915 
16916     private int memoizedHashCode = 0;
16917     @java.lang.Override
16918     public int hashCode() {
16919       if (memoizedHashCode != 0) {
16920         return memoizedHashCode;
16921       }
16922       int hash = 41;
16923       hash = (19 * hash) + getDescriptorForType().hashCode();
16924       if (hasRegion()) {
16925         hash = (37 * hash) + REGION_FIELD_NUMBER;
16926         hash = (53 * hash) + getRegion().hashCode();
16927       }
16928       if (hasScan()) {
16929         hash = (37 * hash) + SCAN_FIELD_NUMBER;
16930         hash = (53 * hash) + getScan().hashCode();
16931       }
16932       if (hasScannerId()) {
16933         hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
16934         hash = (53 * hash) + hashLong(getScannerId());
16935       }
16936       if (hasNumberOfRows()) {
16937         hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER;
16938         hash = (53 * hash) + getNumberOfRows();
16939       }
16940       if (hasCloseScanner()) {
16941         hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER;
16942         hash = (53 * hash) + hashBoolean(getCloseScanner());
16943       }
16944       if (hasNextCallSeq()) {
16945         hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER;
16946         hash = (53 * hash) + hashLong(getNextCallSeq());
16947       }
16948       if (hasClientHandlesPartials()) {
16949         hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER;
16950         hash = (53 * hash) + hashBoolean(getClientHandlesPartials());
16951       }
16952       if (hasClientHandlesHeartbeats()) {
16953         hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER;
16954         hash = (53 * hash) + hashBoolean(getClientHandlesHeartbeats());
16955       }
16956       if (hasTrackScanMetrics()) {
16957         hash = (37 * hash) + TRACK_SCAN_METRICS_FIELD_NUMBER;
16958         hash = (53 * hash) + hashBoolean(getTrackScanMetrics());
16959       }
16960       hash = (29 * hash) + getUnknownFields().hashCode();
16961       memoizedHashCode = hash;
16962       return hash;
16963     }
16964 
16965     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16966         com.google.protobuf.ByteString data)
16967         throws com.google.protobuf.InvalidProtocolBufferException {
16968       return PARSER.parseFrom(data);
16969     }
16970     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16971         com.google.protobuf.ByteString data,
16972         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16973         throws com.google.protobuf.InvalidProtocolBufferException {
16974       return PARSER.parseFrom(data, extensionRegistry);
16975     }
16976     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data)
16977         throws com.google.protobuf.InvalidProtocolBufferException {
16978       return PARSER.parseFrom(data);
16979     }
16980     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16981         byte[] data,
16982         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16983         throws com.google.protobuf.InvalidProtocolBufferException {
16984       return PARSER.parseFrom(data, extensionRegistry);
16985     }
16986     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input)
16987         throws java.io.IOException {
16988       return PARSER.parseFrom(input);
16989     }
16990     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16991         java.io.InputStream input,
16992         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16993         throws java.io.IOException {
16994       return PARSER.parseFrom(input, extensionRegistry);
16995     }
16996     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input)
16997         throws java.io.IOException {
16998       return PARSER.parseDelimitedFrom(input);
16999     }
17000     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(
17001         java.io.InputStream input,
17002         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17003         throws java.io.IOException {
17004       return PARSER.parseDelimitedFrom(input, extensionRegistry);
17005     }
17006     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17007         com.google.protobuf.CodedInputStream input)
17008         throws java.io.IOException {
17009       return PARSER.parseFrom(input);
17010     }
17011     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17012         com.google.protobuf.CodedInputStream input,
17013         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17014         throws java.io.IOException {
17015       return PARSER.parseFrom(input, extensionRegistry);
17016     }
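    // Illustrative sketch (not part of the generated file): round-tripping a
    // ScanRequest through its wire form with the methods above. `request` is a
    // placeholder for a ScanRequest built elsewhere; toByteArray() comes from the
    // protobuf runtime base class and delegates to getSerializedSize()/writeTo().
    //
    //   byte[] wire = request.toByteArray();
    //   ClientProtos.ScanRequest copy = ClientProtos.ScanRequest.parseFrom(wire);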
17017 
17018     public static Builder newBuilder() { return Builder.create(); }
17019     public Builder newBuilderForType() { return newBuilder(); }
17020     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) {
17021       return newBuilder().mergeFrom(prototype);
17022     }
17023     public Builder toBuilder() { return newBuilder(this); }
17024 
17025     @java.lang.Override
17026     protected Builder newBuilderForType(
17027         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17028       Builder builder = new Builder(parent);
17029       return builder;
17030     }
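    // Illustrative sketch (not part of the generated file): the three request
    // shapes described in the message comment above. The region, scan, and
    // scannerId values are placeholders; the real scanner id comes back from the
    // server after the first call.
    //
    //   // 1) open: carries the region and the Scan definition
    //   ClientProtos.ScanRequest open = ClientProtos.ScanRequest.newBuilder()
    //       .setRegion(region)            // an hbase.pb.RegionSpecifier built elsewhere
    //       .setScan(scan)                // the Scan message built elsewhere
    //       .setNumberOfRows(100)
    //       .build();
    //
    //   // 2) next batch: only the scanner id and how many rows to fetch
    //   ClientProtos.ScanRequest next = ClientProtos.ScanRequest.newBuilder()
    //       .setScannerId(scannerId)
    //       .setNumberOfRows(100)
    //       .build();
    //
    //   // 3) close: ask the server to release the scanner
    //   ClientProtos.ScanRequest close = ClientProtos.ScanRequest.newBuilder()
    //       .setScannerId(scannerId)
    //       .setCloseScanner(true)
    //       .build();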
17031     /**
17032      * Protobuf type {@code hbase.pb.ScanRequest}
17033      *
17034      * <pre>
17035      **
17036      * A scan request. Initially, it should specify a scan. Later on, you
17037      * can use the scanner id returned to fetch result batches with a different
17038      * scan request.
17039      *
17040      * The scanner will remain open as long as there are more results and it has
17041      * not been explicitly asked to close.
17042      *
17043      * If you are not interested in the remaining results, you can fetch what you
17044      * need and ask for the scanner to be closed in the same request, saving a trip.
17045      * </pre>
17046      */
17047     public static final class Builder extends
17048         com.google.protobuf.GeneratedMessage.Builder<Builder>
17049        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder {
17050       public static final com.google.protobuf.Descriptors.Descriptor
17051           getDescriptor() {
17052         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor;
17053       }
17054 
17055       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17056           internalGetFieldAccessorTable() {
17057         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable
17058             .ensureFieldAccessorsInitialized(
17059                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
17060       }
17061 
17062       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder()
17063       private Builder() {
17064         maybeForceBuilderInitialization();
17065       }
17066 
17067       private Builder(
17068           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17069         super(parent);
17070         maybeForceBuilderInitialization();
17071       }
17072       private void maybeForceBuilderInitialization() {
17073         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17074           getRegionFieldBuilder();
17075           getScanFieldBuilder();
17076         }
17077       }
17078       private static Builder create() {
17079         return new Builder();
17080       }
17081 
17082       public Builder clear() {
17083         super.clear();
17084         if (regionBuilder_ == null) {
17085           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17086         } else {
17087           regionBuilder_.clear();
17088         }
17089         bitField0_ = (bitField0_ & ~0x00000001);
17090         if (scanBuilder_ == null) {
17091           scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17092         } else {
17093           scanBuilder_.clear();
17094         }
17095         bitField0_ = (bitField0_ & ~0x00000002);
17096         scannerId_ = 0L;
17097         bitField0_ = (bitField0_ & ~0x00000004);
17098         numberOfRows_ = 0;
17099         bitField0_ = (bitField0_ & ~0x00000008);
17100         closeScanner_ = false;
17101         bitField0_ = (bitField0_ & ~0x00000010);
17102         nextCallSeq_ = 0L;
17103         bitField0_ = (bitField0_ & ~0x00000020);
17104         clientHandlesPartials_ = false;
17105         bitField0_ = (bitField0_ & ~0x00000040);
17106         clientHandlesHeartbeats_ = false;
17107         bitField0_ = (bitField0_ & ~0x00000080);
17108         trackScanMetrics_ = false;
17109         bitField0_ = (bitField0_ & ~0x00000100);
17110         return this;
17111       }
17112 
17113       public Builder clone() {
17114         return create().mergeFrom(buildPartial());
17115       }
17116 
17117       public com.google.protobuf.Descriptors.Descriptor
17118           getDescriptorForType() {
17119         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor;
17120       }
17121 
17122       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() {
17123         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
17124       }
17125 
17126       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() {
17127         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial();
17128         if (!result.isInitialized()) {
17129           throw newUninitializedMessageException(result);
17130         }
17131         return result;
17132       }
17133 
17134       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() {
17135         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this);
17136         int from_bitField0_ = bitField0_;
17137         int to_bitField0_ = 0;
17138         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17139           to_bitField0_ |= 0x00000001;
17140         }
17141         if (regionBuilder_ == null) {
17142           result.region_ = region_;
17143         } else {
17144           result.region_ = regionBuilder_.build();
17145         }
17146         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
17147           to_bitField0_ |= 0x00000002;
17148         }
17149         if (scanBuilder_ == null) {
17150           result.scan_ = scan_;
17151         } else {
17152           result.scan_ = scanBuilder_.build();
17153         }
17154         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
17155           to_bitField0_ |= 0x00000004;
17156         }
17157         result.scannerId_ = scannerId_;
17158         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
17159           to_bitField0_ |= 0x00000008;
17160         }
17161         result.numberOfRows_ = numberOfRows_;
17162         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
17163           to_bitField0_ |= 0x00000010;
17164         }
17165         result.closeScanner_ = closeScanner_;
17166         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
17167           to_bitField0_ |= 0x00000020;
17168         }
17169         result.nextCallSeq_ = nextCallSeq_;
17170         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
17171           to_bitField0_ |= 0x00000040;
17172         }
17173         result.clientHandlesPartials_ = clientHandlesPartials_;
17174         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
17175           to_bitField0_ |= 0x00000080;
17176         }
17177         result.clientHandlesHeartbeats_ = clientHandlesHeartbeats_;
17178         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
17179           to_bitField0_ |= 0x00000100;
17180         }
17181         result.trackScanMetrics_ = trackScanMetrics_;
17182         result.bitField0_ = to_bitField0_;
17183         onBuilt();
17184         return result;
17185       }
17186 
17187       public Builder mergeFrom(com.google.protobuf.Message other) {
17188         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) {
17189           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other);
17190         } else {
17191           super.mergeFrom(other);
17192           return this;
17193         }
17194       }
17195 
17196       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) {
17197         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this;
17198         if (other.hasRegion()) {
17199           mergeRegion(other.getRegion());
17200         }
17201         if (other.hasScan()) {
17202           mergeScan(other.getScan());
17203         }
17204         if (other.hasScannerId()) {
17205           setScannerId(other.getScannerId());
17206         }
17207         if (other.hasNumberOfRows()) {
17208           setNumberOfRows(other.getNumberOfRows());
17209         }
17210         if (other.hasCloseScanner()) {
17211           setCloseScanner(other.getCloseScanner());
17212         }
17213         if (other.hasNextCallSeq()) {
17214           setNextCallSeq(other.getNextCallSeq());
17215         }
17216         if (other.hasClientHandlesPartials()) {
17217           setClientHandlesPartials(other.getClientHandlesPartials());
17218         }
17219         if (other.hasClientHandlesHeartbeats()) {
17220           setClientHandlesHeartbeats(other.getClientHandlesHeartbeats());
17221         }
17222         if (other.hasTrackScanMetrics()) {
17223           setTrackScanMetrics(other.getTrackScanMetrics());
17224         }
17225         this.mergeUnknownFields(other.getUnknownFields());
17226         return this;
17227       }
17228 
17229       public final boolean isInitialized() {
17230         if (hasRegion()) {
17231           if (!getRegion().isInitialized()) {
17232             
17233             return false;
17234           }
17235         }
17236         if (hasScan()) {
17237           if (!getScan().isInitialized()) {
17238             
17239             return false;
17240           }
17241         }
17242         return true;
17243       }
17244 
17245       public Builder mergeFrom(
17246           com.google.protobuf.CodedInputStream input,
17247           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17248           throws java.io.IOException {
17249         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null;
17250         try {
17251           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17252         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17253           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage();
17254           throw e;
17255         } finally {
17256           if (parsedMessage != null) {
17257             mergeFrom(parsedMessage);
17258           }
17259         }
17260         return this;
17261       }
17262       private int bitField0_;
17263 
17264       // optional .hbase.pb.RegionSpecifier region = 1;
17265       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17266       private com.google.protobuf.SingleFieldBuilder<
17267           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
17268       /**
17269        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17270        */
17271       public boolean hasRegion() {
17272         return ((bitField0_ & 0x00000001) == 0x00000001);
17273       }
17274       /**
17275        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17276        */
17277       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
17278         if (regionBuilder_ == null) {
17279           return region_;
17280         } else {
17281           return regionBuilder_.getMessage();
17282         }
17283       }
17284       /**
17285        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17286        */
17287       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
17288         if (regionBuilder_ == null) {
17289           if (value == null) {
17290             throw new NullPointerException();
17291           }
17292           region_ = value;
17293           onChanged();
17294         } else {
17295           regionBuilder_.setMessage(value);
17296         }
17297         bitField0_ |= 0x00000001;
17298         return this;
17299       }
17300       /**
17301        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17302        */
17303       public Builder setRegion(
17304           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
17305         if (regionBuilder_ == null) {
17306           region_ = builderForValue.build();
17307           onChanged();
17308         } else {
17309           regionBuilder_.setMessage(builderForValue.build());
17310         }
17311         bitField0_ |= 0x00000001;
17312         return this;
17313       }
17314       /**
17315        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17316        */
17317       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
17318         if (regionBuilder_ == null) {
17319           if (((bitField0_ & 0x00000001) == 0x00000001) &&
17320               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
17321             region_ =
17322               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
17323           } else {
17324             region_ = value;
17325           }
17326           onChanged();
17327         } else {
17328           regionBuilder_.mergeFrom(value);
17329         }
17330         bitField0_ |= 0x00000001;
17331         return this;
17332       }
17333       /**
17334        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17335        */
17336       public Builder clearRegion() {
17337         if (regionBuilder_ == null) {
17338           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17339           onChanged();
17340         } else {
17341           regionBuilder_.clear();
17342         }
17343         bitField0_ = (bitField0_ & ~0x00000001);
17344         return this;
17345       }
17346       /**
17347        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17348        */
17349       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
17350         bitField0_ |= 0x00000001;
17351         onChanged();
17352         return getRegionFieldBuilder().getBuilder();
17353       }
17354       /**
17355        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17356        */
17357       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
17358         if (regionBuilder_ != null) {
17359           return regionBuilder_.getMessageOrBuilder();
17360         } else {
17361           return region_;
17362         }
17363       }
17364       /**
17365        * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
17366        */
17367       private com.google.protobuf.SingleFieldBuilder<
17368           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
17369           getRegionFieldBuilder() {
17370         if (regionBuilder_ == null) {
17371           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
17372               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
17373                   region_,
17374                   getParentForChildren(),
17375                   isClean());
17376           region_ = null;
17377         }
17378         return regionBuilder_;
17379       }
17380 
17381       // optional .hbase.pb.Scan scan = 2;
17382       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17383       private com.google.protobuf.SingleFieldBuilder<
17384           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
17385       /**
17386        * <code>optional .hbase.pb.Scan scan = 2;</code>
17387        */
17388       public boolean hasScan() {
17389         return ((bitField0_ & 0x00000002) == 0x00000002);
17390       }
17391       /**
17392        * <code>optional .hbase.pb.Scan scan = 2;</code>
17393        */
17394       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
17395         if (scanBuilder_ == null) {
17396           return scan_;
17397         } else {
17398           return scanBuilder_.getMessage();
17399         }
17400       }
17401       /**
17402        * <code>optional .hbase.pb.Scan scan = 2;</code>
17403        */
17404       public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
17405         if (scanBuilder_ == null) {
17406           if (value == null) {
17407             throw new NullPointerException();
17408           }
17409           scan_ = value;
17410           onChanged();
17411         } else {
17412           scanBuilder_.setMessage(value);
17413         }
17414         bitField0_ |= 0x00000002;
17415         return this;
17416       }
17417       /**
17418        * <code>optional .hbase.pb.Scan scan = 2;</code>
17419        */
17420       public Builder setScan(
17421           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
17422         if (scanBuilder_ == null) {
17423           scan_ = builderForValue.build();
17424           onChanged();
17425         } else {
17426           scanBuilder_.setMessage(builderForValue.build());
17427         }
17428         bitField0_ |= 0x00000002;
17429         return this;
17430       }
17431       /**
17432        * <code>optional .hbase.pb.Scan scan = 2;</code>
17433        */
17434       public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
17435         if (scanBuilder_ == null) {
17436           if (((bitField0_ & 0x00000002) == 0x00000002) &&
17437               scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
17438             scan_ =
17439               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
17440           } else {
17441             scan_ = value;
17442           }
17443           onChanged();
17444         } else {
17445           scanBuilder_.mergeFrom(value);
17446         }
17447         bitField0_ |= 0x00000002;
17448         return this;
17449       }
17450       /**
17451        * <code>optional .hbase.pb.Scan scan = 2;</code>
17452        */
17453       public Builder clearScan() {
17454         if (scanBuilder_ == null) {
17455           scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17456           onChanged();
17457         } else {
17458           scanBuilder_.clear();
17459         }
17460         bitField0_ = (bitField0_ & ~0x00000002);
17461         return this;
17462       }
17463       /**
17464        * <code>optional .hbase.pb.Scan scan = 2;</code>
17465        */
17466       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
17467         bitField0_ |= 0x00000002;
17468         onChanged();
17469         return getScanFieldBuilder().getBuilder();
17470       }
17471       /**
17472        * <code>optional .hbase.pb.Scan scan = 2;</code>
17473        */
17474       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
17475         if (scanBuilder_ != null) {
17476           return scanBuilder_.getMessageOrBuilder();
17477         } else {
17478           return scan_;
17479         }
17480       }
17481       /**
17482        * <code>optional .hbase.pb.Scan scan = 2;</code>
17483        */
17484       private com.google.protobuf.SingleFieldBuilder<
17485           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> 
17486           getScanFieldBuilder() {
17487         if (scanBuilder_ == null) {
17488           scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
17489               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
17490                   scan_,
17491                   getParentForChildren(),
17492                   isClean());
17493           scan_ = null;
17494         }
17495         return scanBuilder_;
17496       }
17497 
17498       // optional uint64 scanner_id = 3;
17499       private long scannerId_ ;
17500       /**
17501        * <code>optional uint64 scanner_id = 3;</code>
17502        */
17503       public boolean hasScannerId() {
17504         return ((bitField0_ & 0x00000004) == 0x00000004);
17505       }
17506       /**
17507        * <code>optional uint64 scanner_id = 3;</code>
17508        */
17509       public long getScannerId() {
17510         return scannerId_;
17511       }
17512       /**
17513        * <code>optional uint64 scanner_id = 3;</code>
17514        */
17515       public Builder setScannerId(long value) {
17516         bitField0_ |= 0x00000004;
17517         scannerId_ = value;
17518         onChanged();
17519         return this;
17520       }
17521       /**
17522        * <code>optional uint64 scanner_id = 3;</code>
17523        */
17524       public Builder clearScannerId() {
17525         bitField0_ = (bitField0_ & ~0x00000004);
17526         scannerId_ = 0L;
17527         onChanged();
17528         return this;
17529       }
17530 
17531       // optional uint32 number_of_rows = 4;
17532       private int numberOfRows_ ;
17533       /**
17534        * <code>optional uint32 number_of_rows = 4;</code>
17535        */
17536       public boolean hasNumberOfRows() {
17537         return ((bitField0_ & 0x00000008) == 0x00000008);
17538       }
17539       /**
17540        * <code>optional uint32 number_of_rows = 4;</code>
17541        */
17542       public int getNumberOfRows() {
17543         return numberOfRows_;
17544       }
17545       /**
17546        * <code>optional uint32 number_of_rows = 4;</code>
17547        */
17548       public Builder setNumberOfRows(int value) {
17549         bitField0_ |= 0x00000008;
17550         numberOfRows_ = value;
17551         onChanged();
17552         return this;
17553       }
17554       /**
17555        * <code>optional uint32 number_of_rows = 4;</code>
17556        */
17557       public Builder clearNumberOfRows() {
17558         bitField0_ = (bitField0_ & ~0x00000008);
17559         numberOfRows_ = 0;
17560         onChanged();
17561         return this;
17562       }
17563 
17564       // optional bool close_scanner = 5;
17565       private boolean closeScanner_ ;
17566       /**
17567        * <code>optional bool close_scanner = 5;</code>
17568        */
17569       public boolean hasCloseScanner() {
17570         return ((bitField0_ & 0x00000010) == 0x00000010);
17571       }
17572       /**
17573        * <code>optional bool close_scanner = 5;</code>
17574        */
17575       public boolean getCloseScanner() {
17576         return closeScanner_;
17577       }
17578       /**
17579        * <code>optional bool close_scanner = 5;</code>
17580        */
17581       public Builder setCloseScanner(boolean value) {
17582         bitField0_ |= 0x00000010;
17583         closeScanner_ = value;
17584         onChanged();
17585         return this;
17586       }
17587       /**
17588        * <code>optional bool close_scanner = 5;</code>
17589        */
17590       public Builder clearCloseScanner() {
17591         bitField0_ = (bitField0_ & ~0x00000010);
17592         closeScanner_ = false;
17593         onChanged();
17594         return this;
17595       }
17596 
17597       // optional uint64 next_call_seq = 6;
17598       private long nextCallSeq_ ;
17599       /**
17600        * <code>optional uint64 next_call_seq = 6;</code>
17601        */
17602       public boolean hasNextCallSeq() {
17603         return ((bitField0_ & 0x00000020) == 0x00000020);
17604       }
17605       /**
17606        * <code>optional uint64 next_call_seq = 6;</code>
17607        */
17608       public long getNextCallSeq() {
17609         return nextCallSeq_;
17610       }
17611       /**
17612        * <code>optional uint64 next_call_seq = 6;</code>
17613        */
17614       public Builder setNextCallSeq(long value) {
17615         bitField0_ |= 0x00000020;
17616         nextCallSeq_ = value;
17617         onChanged();
17618         return this;
17619       }
17620       /**
17621        * <code>optional uint64 next_call_seq = 6;</code>
17622        */
17623       public Builder clearNextCallSeq() {
17624         bitField0_ = (bitField0_ & ~0x00000020);
17625         nextCallSeq_ = 0L;
17626         onChanged();
17627         return this;
17628       }
17629 
17630       // optional bool client_handles_partials = 7;
17631       private boolean clientHandlesPartials_ ;
17632       /**
17633        * <code>optional bool client_handles_partials = 7;</code>
17634        */
17635       public boolean hasClientHandlesPartials() {
17636         return ((bitField0_ & 0x00000040) == 0x00000040);
17637       }
17638       /**
17639        * <code>optional bool client_handles_partials = 7;</code>
17640        */
17641       public boolean getClientHandlesPartials() {
17642         return clientHandlesPartials_;
17643       }
17644       /**
17645        * <code>optional bool client_handles_partials = 7;</code>
17646        */
17647       public Builder setClientHandlesPartials(boolean value) {
17648         bitField0_ |= 0x00000040;
17649         clientHandlesPartials_ = value;
17650         onChanged();
17651         return this;
17652       }
17653       /**
17654        * <code>optional bool client_handles_partials = 7;</code>
17655        */
17656       public Builder clearClientHandlesPartials() {
17657         bitField0_ = (bitField0_ & ~0x00000040);
17658         clientHandlesPartials_ = false;
17659         onChanged();
17660         return this;
17661       }
17662 
17663       // optional bool client_handles_heartbeats = 8;
17664       private boolean clientHandlesHeartbeats_ ;
17665       /**
17666        * <code>optional bool client_handles_heartbeats = 8;</code>
17667        */
17668       public boolean hasClientHandlesHeartbeats() {
17669         return ((bitField0_ & 0x00000080) == 0x00000080);
17670       }
17671       /**
17672        * <code>optional bool client_handles_heartbeats = 8;</code>
17673        */
17674       public boolean getClientHandlesHeartbeats() {
17675         return clientHandlesHeartbeats_;
17676       }
17677       /**
17678        * <code>optional bool client_handles_heartbeats = 8;</code>
17679        */
17680       public Builder setClientHandlesHeartbeats(boolean value) {
17681         bitField0_ |= 0x00000080;
17682         clientHandlesHeartbeats_ = value;
17683         onChanged();
17684         return this;
17685       }
17686       /**
17687        * <code>optional bool client_handles_heartbeats = 8;</code>
17688        */
17689       public Builder clearClientHandlesHeartbeats() {
17690         bitField0_ = (bitField0_ & ~0x00000080);
17691         clientHandlesHeartbeats_ = false;
17692         onChanged();
17693         return this;
17694       }
17695 
17696       // optional bool track_scan_metrics = 9;
17697       private boolean trackScanMetrics_ ;
17698       /**
17699        * <code>optional bool track_scan_metrics = 9;</code>
17700        */
17701       public boolean hasTrackScanMetrics() {
17702         return ((bitField0_ & 0x00000100) == 0x00000100);
17703       }
17704       /**
17705        * <code>optional bool track_scan_metrics = 9;</code>
17706        */
17707       public boolean getTrackScanMetrics() {
17708         return trackScanMetrics_;
17709       }
17710       /**
17711        * <code>optional bool track_scan_metrics = 9;</code>
17712        */
17713       public Builder setTrackScanMetrics(boolean value) {
17714         bitField0_ |= 0x00000100;
17715         trackScanMetrics_ = value;
17716         onChanged();
17717         return this;
17718       }
17719       /**
17720        * <code>optional bool track_scan_metrics = 9;</code>
17721        */
17722       public Builder clearTrackScanMetrics() {
17723         bitField0_ = (bitField0_ & ~0x00000100);
17724         trackScanMetrics_ = false;
17725         onChanged();
17726         return this;
17727       }
17728 
17729       // @@protoc_insertion_point(builder_scope:hbase.pb.ScanRequest)
17730     }
17731 
17732     static {
17733       defaultInstance = new ScanRequest(true);
17734       defaultInstance.initFields();
17735     }
17736 
17737     // @@protoc_insertion_point(class_scope:hbase.pb.ScanRequest)
17738   }
17739 
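A minimal usage sketch for the ScanRequest builder defined above (illustrative only, not part of the generated file): it populates an initial "open scanner" call and a follow-up "next" call. The names regionSpecifier, scanProto, and scannerIdFromResponse are hypothetical placeholders assumed to be built or obtained elsewhere.

    // Illustrative sketch only. Opens a scanner on a region, then issues a "next" call.
    // regionSpecifier and scanProto are assumed to have been built elsewhere.
    ClientProtos.ScanRequest open = ClientProtos.ScanRequest.newBuilder()
        .setRegion(regionSpecifier)          // target region
        .setScan(scanProto)                  // the Scan definition
        .setNumberOfRows(100)                // up to 100 rows in the first batch
        .setClientHandlesPartials(true)      // client can stitch partial Results
        .setClientHandlesHeartbeats(true)    // client understands heartbeat responses
        .setTrackScanMetrics(false)
        .build();

    // Later calls reuse the scanner_id returned by the server and advance
    // next_call_seq so the server can recognize retries of the same invocation.
    ClientProtos.ScanRequest next = ClientProtos.ScanRequest.newBuilder()
        .setScannerId(scannerIdFromResponse)
        .setNumberOfRows(100)
        .setNextCallSeq(1)
        .setCloseScanner(false)
        .build();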
17740   public interface ScanResponseOrBuilder
17741       extends com.google.protobuf.MessageOrBuilder {
17742 
17743     // repeated uint32 cells_per_result = 1;
17744     /**
17745      * <code>repeated uint32 cells_per_result = 1;</code>
17746      *
17747      * <pre>
17748      * This field is filled in if we are doing cellblocks.  A cellblock is made up
17749      * of all Cells serialized out as one cellblock BUT responses from a server
17750      * have their Cells grouped by Result.  So we can reconstitute the
17751      * Results on the client-side, this field is a list of counts of Cells
17752      * in each Result that makes up the response.  For example, if this field
17753      * has 3, 3, 3 in it, then we know that on the client, we are to make
17754      * three Results each of three Cells each.
17755      * </pre>
17756      */
17757     java.util.List<java.lang.Integer> getCellsPerResultList();
17758     /**
17759      * <code>repeated uint32 cells_per_result = 1;</code>
17760      *
17761      * <pre>
17762      * This field is filled in if we are doing cellblocks.  A cellblock is made up
17763      * of all Cells serialized out as one cellblock BUT responses from a server
17764      * have their Cells grouped by Result.  So we can reconstitute the
17765      * Results on the client-side, this field is a list of counts of Cells
17766      * in each Result that makes up the response.  For example, if this field
17767      * has 3, 3, 3 in it, then we know that on the client, we are to make
17768      * three Results each of three Cells each.
17769      * </pre>
17770      */
17771     int getCellsPerResultCount();
17772     /**
17773      * <code>repeated uint32 cells_per_result = 1;</code>
17774      *
17775      * <pre>
17776      * This field is filled in if we are doing cellblocks.  A cellblock is made up
17777      * of all Cells serialized out as one cellblock BUT responses from a server
17778      * have their Cells grouped by Result.  So we can reconstitute the
17779      * Results on the client-side, this field is a list of counts of Cells
17780      * in each Result that makes up the response.  For example, if this field
17781      * has 3, 3, 3 in it, then we know that on the client, we are to make
17782      * three Results each of three Cells each.
17783      * </pre>
17784      */
17785     int getCellsPerResult(int index);
17786 
17787     // optional uint64 scanner_id = 2;
17788     /**
17789      * <code>optional uint64 scanner_id = 2;</code>
17790      */
17791     boolean hasScannerId();
17792     /**
17793      * <code>optional uint64 scanner_id = 2;</code>
17794      */
17795     long getScannerId();
17796 
17797     // optional bool more_results = 3;
17798     /**
17799      * <code>optional bool more_results = 3;</code>
17800      */
17801     boolean hasMoreResults();
17802     /**
17803      * <code>optional bool more_results = 3;</code>
17804      */
17805     boolean getMoreResults();
17806 
17807     // optional uint32 ttl = 4;
17808     /**
17809      * <code>optional uint32 ttl = 4;</code>
17810      */
17811     boolean hasTtl();
17812     /**
17813      * <code>optional uint32 ttl = 4;</code>
17814      */
17815     int getTtl();
17816 
17817     // repeated .hbase.pb.Result results = 5;
17818     /**
17819      * <code>repeated .hbase.pb.Result results = 5;</code>
17820      *
17821      * <pre>
17822      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
17823      * This field is mutually exclusive with cells_per_result (since the Cells will
17824      * be inside the pb'd Result)
17825      * </pre>
17826      */
17827     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> 
17828         getResultsList();
17829     /**
17830      * <code>repeated .hbase.pb.Result results = 5;</code>
17831      *
17832      * <pre>
17833      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
17834      * This field is mutually exclusive with cells_per_result (since the Cells will
17835      * be inside the pb'd Result)
17836      * </pre>
17837      */
17838     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index);
17839     /**
17840      * <code>repeated .hbase.pb.Result results = 5;</code>
17841      *
17842      * <pre>
17843      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
17844      * This field is mutually exclusive with cells_per_result (since the Cells will
17845      * be inside the pb'd Result)
17846      * </pre>
17847      */
17848     int getResultsCount();
17849     /**
17850      * <code>repeated .hbase.pb.Result results = 5;</code>
17851      *
17852      * <pre>
17853      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
17854      * This field is mutually exclusive with cells_per_result (since the Cells will
17855      * be inside the pb'd Result)
17856      * </pre>
17857      */
17858     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
17859         getResultsOrBuilderList();
17860     /**
17861      * <code>repeated .hbase.pb.Result results = 5;</code>
17862      *
17863      * <pre>
17864      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
17865      * This field is mutually exclusive with cells_per_result (since the Cells will
17866      * be inside the pb'd Result)
17867      * </pre>
17868      */
17869     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
17870         int index);
17871 
17872     // optional bool stale = 6;
17873     /**
17874      * <code>optional bool stale = 6;</code>
17875      */
17876     boolean hasStale();
17877     /**
17878      * <code>optional bool stale = 6;</code>
17879      */
17880     boolean getStale();
17881 
17882     // repeated bool partial_flag_per_result = 7;
17883     /**
17884      * <code>repeated bool partial_flag_per_result = 7;</code>
17885      *
17886      * <pre>
17887      * This field is filled in if we are doing cellblocks. In the event that a row
17888      * could not fit all of its cells into a single RPC chunk, the results will be
17889      * returned as partials, and reconstructed into a complete result on the client
17890      * side. This field is a list of flags indicating whether or not the result
17891      * that the cells belong to is a partial result. For example, if this field
17892      * has false, false, true in it, then we know that on the client side, we need to
17893      * make another RPC request since the last result was only a partial.
17894      * </pre>
17895      */
17896     java.util.List<java.lang.Boolean> getPartialFlagPerResultList();
17897     /**
17898      * <code>repeated bool partial_flag_per_result = 7;</code>
17899      *
17900      * <pre>
17901      * This field is filled in if we are doing cellblocks. In the event that a row
17902      * could not fit all of its cells into a single RPC chunk, the results will be
17903      * returned as partials, and reconstructed into a complete result on the client
17904      * side. This field is a list of flags indicating whether or not the result
17905      * that the cells belong to is a partial result. For example, if this field
17906      * has false, false, true in it, then we know that on the client side, we need to
17907      * make another RPC request since the last result was only a partial.
17908      * </pre>
17909      */
17910     int getPartialFlagPerResultCount();
17911     /**
17912      * <code>repeated bool partial_flag_per_result = 7;</code>
17913      *
17914      * <pre>
17915      * This field is filled in if we are doing cellblocks. In the event that a row
17916      * could not fit all of its cells into a single RPC chunk, the results will be
17917      * returned as partials, and reconstructed into a complete result on the client
17918      * side. This field is a list of flags indicating whether or not the result
17919      * that the cells belong to is a partial result. For example, if this field
17920      * has false, false, true in it, then we know that on the client side, we need to
17921      * make another RPC request since the last result was only a partial.
17922      * </pre>
17923      */
17924     boolean getPartialFlagPerResult(int index);
17925 
17926     // optional bool more_results_in_region = 8;
17927     /**
17928      * <code>optional bool more_results_in_region = 8;</code>
17929      *
17930      * <pre>
17931      * A server may choose to limit the number of results returned to the client for
17932      * reasons such as the size in bytes or quantity of results accumulated. This field
17933      * will be true when more results exist in the current region.
17934      * </pre>
17935      */
17936     boolean hasMoreResultsInRegion();
17937     /**
17938      * <code>optional bool more_results_in_region = 8;</code>
17939      *
17940      * <pre>
17941      * A server may choose to limit the number of results returned to the client for
17942      * reasons such as the size in bytes or quantity of results accumulated. This field
17943      * will be true when more results exist in the current region.
17944      * </pre>
17945      */
17946     boolean getMoreResultsInRegion();
17947 
17948     // optional bool heartbeat_message = 9;
17949     /**
17950      * <code>optional bool heartbeat_message = 9;</code>
17951      *
17952      * <pre>
17953      * This field is filled in if the server is sending back a heartbeat message.
17954      * Heartbeat messages are sent back to the client to prevent the scanner from
17955      * timing out. Seeing a heartbeat message communicates to the Client that the
17956      * server would have continued to scan had the time limit not been reached.
17957      * </pre>
17958      */
17959     boolean hasHeartbeatMessage();
17960     /**
17961      * <code>optional bool heartbeat_message = 9;</code>
17962      *
17963      * <pre>
17964      * This field is filled in if the server is sending back a heartbeat message.
17965      * Heartbeat messages are sent back to the client to prevent the scanner from
17966      * timing out. Seeing a heartbeat message communicates to the Client that the
17967      * server would have continued to scan had the time limit not been reached.
17968      * </pre>
17969      */
17970     boolean getHeartbeatMessage();
17971 
17972     // optional .hbase.pb.ScanMetrics scan_metrics = 10;
17973     /**
17974      * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
17975      *
17976      * <pre>
17977      * This field is filled in if the client has requested that scan metrics be tracked.
17978      * The metrics tracked here are sent back to the client to be tracked together with 
17979      * the existing client side metrics.
17980      * </pre>
17981      */
17982     boolean hasScanMetrics();
17983     /**
17984      * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
17985      *
17986      * <pre>
17987      * This field is filled in if the client has requested that scan metrics be tracked.
17988      * The metrics tracked here are sent back to the client to be tracked together with 
17989      * the existing client side metrics.
17990      * </pre>
17991      */
17992     org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics();
17993     /**
17994      * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
17995      *
17996      * <pre>
17997      * This field is filled in if the client has requested that scan metrics be tracked.
17998      * The metrics tracked here are sent back to the client to be tracked together with 
17999      * the existing client side metrics.
18000      * </pre>
18001      */
18002     org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder();
18003   }
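Before the generated ScanResponse class itself, a brief illustrative sketch of what the cells_per_result documentation above implies on the client side: the counts are used to slice the flat list of Cells decoded from the accompanying cellblock back into Results. This is not generated code; the variables cells and response, and the use of Result.create, are assumptions about the surrounding client.

    // Illustrative sketch: regroup cellblock Cells into Results using cells_per_result.
    // Assumes `cells` holds the Cells decoded from the cellblock, in server order,
    // and `response` is the ScanResponse that accompanied it.
    java.util.List<org.apache.hadoop.hbase.client.Result> results =
        new java.util.ArrayList<org.apache.hadoop.hbase.client.Result>();
    int offset = 0;
    for (int count : response.getCellsPerResultList()) {
      results.add(org.apache.hadoop.hbase.client.Result.create(
          cells.subList(offset, offset + count)));
      offset += count;
    }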
18004   /**
18005    * Protobuf type {@code hbase.pb.ScanResponse}
18006    *
18007    * <pre>
18008    **
18009    * The scan response. If there are no more results, more_results will
18010    * be false.  If it is not specified, it means there are more.
18011    * </pre>
18012    */
18013   public static final class ScanResponse extends
18014       com.google.protobuf.GeneratedMessage
18015       implements ScanResponseOrBuilder {
18016     // Use ScanResponse.newBuilder() to construct.
18017     private ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
18018       super(builder);
18019       this.unknownFields = builder.getUnknownFields();
18020     }
18021     private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
18022 
18023     private static final ScanResponse defaultInstance;
18024     public static ScanResponse getDefaultInstance() {
18025       return defaultInstance;
18026     }
18027 
18028     public ScanResponse getDefaultInstanceForType() {
18029       return defaultInstance;
18030     }
18031 
18032     private final com.google.protobuf.UnknownFieldSet unknownFields;
18033     @java.lang.Override
18034     public final com.google.protobuf.UnknownFieldSet
18035         getUnknownFields() {
18036       return this.unknownFields;
18037     }
18038     private ScanResponse(
18039         com.google.protobuf.CodedInputStream input,
18040         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18041         throws com.google.protobuf.InvalidProtocolBufferException {
18042       initFields();
18043       int mutable_bitField0_ = 0;
18044       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
18045           com.google.protobuf.UnknownFieldSet.newBuilder();
18046       try {
18047         boolean done = false;
18048         while (!done) {
18049           int tag = input.readTag();
18050           switch (tag) {
18051             case 0:
18052               done = true;
18053               break;
18054             default: {
18055               if (!parseUnknownField(input, unknownFields,
18056                                      extensionRegistry, tag)) {
18057                 done = true;
18058               }
18059               break;
18060             }
18061             case 8: {
18062               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
18063                 cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
18064                 mutable_bitField0_ |= 0x00000001;
18065               }
18066               cellsPerResult_.add(input.readUInt32());
18067               break;
18068             }
18069             case 10: {
18070               int length = input.readRawVarint32();
18071               int limit = input.pushLimit(length);
18072               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
18073                 cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
18074                 mutable_bitField0_ |= 0x00000001;
18075               }
18076               while (input.getBytesUntilLimit() > 0) {
18077                 cellsPerResult_.add(input.readUInt32());
18078               }
18079               input.popLimit(limit);
18080               break;
18081             }
18082             case 16: {
18083               bitField0_ |= 0x00000001;
18084               scannerId_ = input.readUInt64();
18085               break;
18086             }
18087             case 24: {
18088               bitField0_ |= 0x00000002;
18089               moreResults_ = input.readBool();
18090               break;
18091             }
18092             case 32: {
18093               bitField0_ |= 0x00000004;
18094               ttl_ = input.readUInt32();
18095               break;
18096             }
18097             case 42: {
18098               if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
18099                 results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>();
18100                 mutable_bitField0_ |= 0x00000010;
18101               }
18102               results_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry));
18103               break;
18104             }
18105             case 48: {
18106               bitField0_ |= 0x00000008;
18107               stale_ = input.readBool();
18108               break;
18109             }
18110             case 56: {
18111               if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
18112                 partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
18113                 mutable_bitField0_ |= 0x00000040;
18114               }
18115               partialFlagPerResult_.add(input.readBool());
18116               break;
18117             }
18118             case 58: {
18119               int length = input.readRawVarint32();
18120               int limit = input.pushLimit(length);
18121               if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
18122                 partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
18123                 mutable_bitField0_ |= 0x00000040;
18124               }
18125               while (input.getBytesUntilLimit() > 0) {
18126                 partialFlagPerResult_.add(input.readBool());
18127               }
18128               input.popLimit(limit);
18129               break;
18130             }
18131             case 64: {
18132               bitField0_ |= 0x00000010;
18133               moreResultsInRegion_ = input.readBool();
18134               break;
18135             }
18136             case 72: {
18137               bitField0_ |= 0x00000020;
18138               heartbeatMessage_ = input.readBool();
18139               break;
18140             }
18141             case 82: {
18142               org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder subBuilder = null;
18143               if (((bitField0_ & 0x00000040) == 0x00000040)) {
18144                 subBuilder = scanMetrics_.toBuilder();
18145               }
18146               scanMetrics_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.PARSER, extensionRegistry);
18147               if (subBuilder != null) {
18148                 subBuilder.mergeFrom(scanMetrics_);
18149                 scanMetrics_ = subBuilder.buildPartial();
18150               }
18151               bitField0_ |= 0x00000040;
18152               break;
18153             }
18154           }
18155         }
18156       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18157         throw e.setUnfinishedMessage(this);
18158       } catch (java.io.IOException e) {
18159         throw new com.google.protobuf.InvalidProtocolBufferException(
18160             e.getMessage()).setUnfinishedMessage(this);
18161       } finally {
18162         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
18163           cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
18164         }
18165         if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
18166           results_ = java.util.Collections.unmodifiableList(results_);
18167         }
18168         if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
18169           partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
18170         }
18171         this.unknownFields = unknownFields.build();
18172         makeExtensionsImmutable();
18173       }
18174     }
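A note on the case labels in the parsing constructor above: each label is a protobuf tag, i.e. the field number shifted left by three bits OR-ed with the wire type (0 = varint, 2 = length-delimited). The tiny helper below is illustrative only and simply reproduces that arithmetic for the fields of ScanResponse.

    // Illustrative only: how the switch labels above map to ScanResponse fields.
    static int tag(int fieldNumber, int wireType) {
      return (fieldNumber << 3) | wireType;   // protobuf tag encoding
    }
    // tag(1, 0)  ==  8  -> cells_per_result, single varint element
    // tag(1, 2)  == 10  -> cells_per_result, packed (length-delimited) run
    // tag(2, 0)  == 16  -> scanner_id
    // tag(7, 0)  == 56  -> partial_flag_per_result, single bool
    // tag(7, 2)  == 58  -> partial_flag_per_result, packed run
    // tag(10, 2) == 82  -> scan_metrics, embedded ScanMetrics message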
18175     public static final com.google.protobuf.Descriptors.Descriptor
18176         getDescriptor() {
18177       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor;
18178     }
18179 
18180     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18181         internalGetFieldAccessorTable() {
18182       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable
18183           .ensureFieldAccessorsInitialized(
18184               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
18185     }
18186 
18187     public static com.google.protobuf.Parser<ScanResponse> PARSER =
18188         new com.google.protobuf.AbstractParser<ScanResponse>() {
18189       public ScanResponse parsePartialFrom(
18190           com.google.protobuf.CodedInputStream input,
18191           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18192           throws com.google.protobuf.InvalidProtocolBufferException {
18193         return new ScanResponse(input, extensionRegistry);
18194       }
18195     };
18196 
18197     @java.lang.Override
18198     public com.google.protobuf.Parser<ScanResponse> getParserForType() {
18199       return PARSER;
18200     }
18201 
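The static PARSER above is what the parseFrom overloads near the end of this class delegate to; a minimal, illustrative decoding sketch (the method name and the origin of the bytes are assumptions, not generated code):

    // Illustrative only: decode a ScanResponse from the raw bytes of an RPC response.
    static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse decode(byte[] wire)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.parseFrom(wire);
    }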
18202     private int bitField0_;
18203     // repeated uint32 cells_per_result = 1;
18204     public static final int CELLS_PER_RESULT_FIELD_NUMBER = 1;
18205     private java.util.List<java.lang.Integer> cellsPerResult_;
18206     /**
18207      * <code>repeated uint32 cells_per_result = 1;</code>
18208      *
18209      * <pre>
18210      * This field is filled in if we are doing cellblocks.  A cellblock is made up
18211      * of all Cells serialized out as one cellblock BUT responses from a server
18212      * have their Cells grouped by Result.  So we can reconstitute the
18213      * Results on the client-side, this field is a list of counts of Cells
18214      * in each Result that makes up the response.  For example, if this field
18215      * has 3, 3, 3 in it, then we know that on the client, we are to make
18216      * three Results each of three Cells each.
18217      * </pre>
18218      */
18219     public java.util.List<java.lang.Integer>
18220         getCellsPerResultList() {
18221       return cellsPerResult_;
18222     }
18223     /**
18224      * <code>repeated uint32 cells_per_result = 1;</code>
18225      *
18226      * <pre>
18227      * This field is filled in if we are doing cellblocks.  A cellblock is made up
18228      * of all Cells serialized out as one cellblock BUT responses from a server
18229      * have their Cells grouped by Result.  So we can reconstitute the
18230      * Results on the client-side, this field is a list of counts of Cells
18231      * in each Result that makes up the response.  For example, if this field
18232      * has 3, 3, 3 in it, then we know that on the client, we are to make
18233      * three Results each of three Cells each.
18234      * </pre>
18235      */
18236     public int getCellsPerResultCount() {
18237       return cellsPerResult_.size();
18238     }
18239     /**
18240      * <code>repeated uint32 cells_per_result = 1;</code>
18241      *
18242      * <pre>
18243      * This field is filled in if we are doing cellblocks.  A cellblock is made up
18244      * of all Cells serialized out as one cellblock BUT responses from a server
18245      * have their Cells grouped by Result.  So we can reconstitute the
18246      * Results on the client-side, this field is a list of counts of Cells
18247      * in each Result that makes up the response.  For example, if this field
18248      * has 3, 3, 3 in it, then we know that on the client, we are to make
18249      * three Results each of three Cells each.
18250      * </pre>
18251      */
18252     public int getCellsPerResult(int index) {
18253       return cellsPerResult_.get(index);
18254     }
18255 
18256     // optional uint64 scanner_id = 2;
18257     public static final int SCANNER_ID_FIELD_NUMBER = 2;
18258     private long scannerId_;
18259     /**
18260      * <code>optional uint64 scanner_id = 2;</code>
18261      */
18262     public boolean hasScannerId() {
18263       return ((bitField0_ & 0x00000001) == 0x00000001);
18264     }
18265     /**
18266      * <code>optional uint64 scanner_id = 2;</code>
18267      */
18268     public long getScannerId() {
18269       return scannerId_;
18270     }
18271 
18272     // optional bool more_results = 3;
18273     public static final int MORE_RESULTS_FIELD_NUMBER = 3;
18274     private boolean moreResults_;
18275     /**
18276      * <code>optional bool more_results = 3;</code>
18277      */
18278     public boolean hasMoreResults() {
18279       return ((bitField0_ & 0x00000002) == 0x00000002);
18280     }
18281     /**
18282      * <code>optional bool more_results = 3;</code>
18283      */
18284     public boolean getMoreResults() {
18285       return moreResults_;
18286     }
18287 
18288     // optional uint32 ttl = 4;
18289     public static final int TTL_FIELD_NUMBER = 4;
18290     private int ttl_;
18291     /**
18292      * <code>optional uint32 ttl = 4;</code>
18293      */
18294     public boolean hasTtl() {
18295       return ((bitField0_ & 0x00000004) == 0x00000004);
18296     }
18297     /**
18298      * <code>optional uint32 ttl = 4;</code>
18299      */
18300     public int getTtl() {
18301       return ttl_;
18302     }
18303 
18304     // repeated .hbase.pb.Result results = 5;
18305     public static final int RESULTS_FIELD_NUMBER = 5;
18306     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_;
18307     /**
18308      * <code>repeated .hbase.pb.Result results = 5;</code>
18309      *
18310      * <pre>
18311      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18312      * This field is mutually exclusive with cells_per_result (since the Cells will
18313      * be inside the pb'd Result)
18314      * </pre>
18315      */
18316     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
18317       return results_;
18318     }
18319     /**
18320      * <code>repeated .hbase.pb.Result results = 5;</code>
18321      *
18322      * <pre>
18323      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18324      * This field is mutually exclusive with cells_per_result (since the Cells will
18325      * be inside the pb'd Result)
18326      * </pre>
18327      */
18328     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
18329         getResultsOrBuilderList() {
18330       return results_;
18331     }
18332     /**
18333      * <code>repeated .hbase.pb.Result results = 5;</code>
18334      *
18335      * <pre>
18336      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18337      * This field is mutually exclusive with cells_per_result (since the Cells will
18338      * be inside the pb'd Result)
18339      * </pre>
18340      */
18341     public int getResultsCount() {
18342       return results_.size();
18343     }
18344     /**
18345      * <code>repeated .hbase.pb.Result results = 5;</code>
18346      *
18347      * <pre>
18348      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18349      * This field is mutually exclusive with cells_per_result (since the Cells will
18350      * be inside the pb'd Result)
18351      * </pre>
18352      */
18353     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
18354       return results_.get(index);
18355     }
18356     /**
18357      * <code>repeated .hbase.pb.Result results = 5;</code>
18358      *
18359      * <pre>
18360      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18361      * This field is mutually exclusive with cells_per_result (since the Cells will
18362      * be inside the pb'd Result)
18363      * </pre>
18364      */
18365     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
18366         int index) {
18367       return results_.get(index);
18368     }
18369 
18370     // optional bool stale = 6;
18371     public static final int STALE_FIELD_NUMBER = 6;
18372     private boolean stale_;
18373     /**
18374      * <code>optional bool stale = 6;</code>
18375      */
18376     public boolean hasStale() {
18377       return ((bitField0_ & 0x00000008) == 0x00000008);
18378     }
18379     /**
18380      * <code>optional bool stale = 6;</code>
18381      */
18382     public boolean getStale() {
18383       return stale_;
18384     }
18385 
18386     // repeated bool partial_flag_per_result = 7;
18387     public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7;
18388     private java.util.List<java.lang.Boolean> partialFlagPerResult_;
18389     /**
18390      * <code>repeated bool partial_flag_per_result = 7;</code>
18391      *
18392      * <pre>
18393      * This field is filled in if we are doing cellblocks. In the event that a row
18394      * could not fit all of its cells into a single RPC chunk, the results will be
18395      * returned as partials, and reconstructed into a complete result on the client
18396      * side. This field is a list of flags indicating whether or not the result
18397      * that the cells belong to is a partial result. For example, if this field
18398      * has false, false, true in it, then we know that on the client side, we need to
18399      * make another RPC request since the last result was only a partial.
18400      * </pre>
18401      */
18402     public java.util.List<java.lang.Boolean>
18403         getPartialFlagPerResultList() {
18404       return partialFlagPerResult_;
18405     }
18406     /**
18407      * <code>repeated bool partial_flag_per_result = 7;</code>
18408      *
18409      * <pre>
18410      * This field is filled in if we are doing cellblocks. In the event that a row
18411      * could not fit all of its cells into a single RPC chunk, the results will be
18412      * returned as partials, and reconstructed into a complete result on the client
18413      * side. This field is a list of flags indicating whether or not the result
18414      * that the cells belong to is a partial result. For example, if this field
18415      * has false, false, true in it, then we know that on the client side, we need to
18416      * make another RPC request since the last result was only a partial.
18417      * </pre>
18418      */
18419     public int getPartialFlagPerResultCount() {
18420       return partialFlagPerResult_.size();
18421     }
18422     /**
18423      * <code>repeated bool partial_flag_per_result = 7;</code>
18424      *
18425      * <pre>
18426      * This field is filled in if we are doing cellblocks. In the event that a row
18427      * could not fit all of its cells into a single RPC chunk, the results will be
18428      * returned as partials, and reconstructed into a complete result on the client
18429      * side. This field is a list of flags indicating whether or not the result
18430      * that the cells belong to is a partial result. For example, if this field
18431      * has false, false, true in it, then we know that on the client side, we need to
18432      * make another RPC request since the last result was only a partial.
18433      * </pre>
18434      */
18435     public boolean getPartialFlagPerResult(int index) {
18436       return partialFlagPerResult_.get(index);
18437     }
18438 
18439     // optional bool more_results_in_region = 8;
18440     public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8;
18441     private boolean moreResultsInRegion_;
18442     /**
18443      * <code>optional bool more_results_in_region = 8;</code>
18444      *
18445      * <pre>
18446      * A server may choose to limit the number of results returned to the client for
18447      * reasons such as the size in bytes or quantity of results accumulated. This field
18448      * will be true when more results exist in the current region.
18449      * </pre>
18450      */
18451     public boolean hasMoreResultsInRegion() {
18452       return ((bitField0_ & 0x00000010) == 0x00000010);
18453     }
18454     /**
18455      * <code>optional bool more_results_in_region = 8;</code>
18456      *
18457      * <pre>
18458      * A server may choose to limit the number of results returned to the client for
18459      * reasons such as the size in bytes or quantity of results accumulated. This field
18460      * will be true when more results exist in the current region.
18461      * </pre>
18462      */
18463     public boolean getMoreResultsInRegion() {
18464       return moreResultsInRegion_;
18465     }
18466 
18467     // optional bool heartbeat_message = 9;
18468     public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9;
18469     private boolean heartbeatMessage_;
18470     /**
18471      * <code>optional bool heartbeat_message = 9;</code>
18472      *
18473      * <pre>
18474      * This field is filled in if the server is sending back a heartbeat message.
18475      * Heartbeat messages are sent back to the client to prevent the scanner from
18476      * timing out. Seeing a heartbeat message communicates to the Client that the
18477      * server would have continued to scan had the time limit not been reached.
18478      * </pre>
18479      */
18480     public boolean hasHeartbeatMessage() {
18481       return ((bitField0_ & 0x00000020) == 0x00000020);
18482     }
18483     /**
18484      * <code>optional bool heartbeat_message = 9;</code>
18485      *
18486      * <pre>
18487      * This field is filled in if the server is sending back a heartbeat message.
18488      * Heartbeat messages are sent back to the client to prevent the scanner from
18489      * timing out. Seeing a heartbeat message communicates to the Client that the
18490      * server would have continued to scan had the time limit not been reached.
18491      * </pre>
18492      */
18493     public boolean getHeartbeatMessage() {
18494       return heartbeatMessage_;
18495     }
18496 
18497     // optional .hbase.pb.ScanMetrics scan_metrics = 10;
18498     public static final int SCAN_METRICS_FIELD_NUMBER = 10;
18499     private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_;
18500     /**
18501      * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
18502      *
18503      * <pre>
18504      * This field is filled in if the client has requested that scan metrics be tracked.
18505      * The metrics tracked here are sent back to the client to be tracked together with 
18506      * the existing client side metrics.
18507      * </pre>
18508      */
18509     public boolean hasScanMetrics() {
18510       return ((bitField0_ & 0x00000040) == 0x00000040);
18511     }
18512     /**
18513      * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
18514      *
18515      * <pre>
18516      * This field is filled in if the client has requested that scan metrics be tracked.
18517      * The metrics tracked here are sent back to the client to be tracked together with 
18518      * the existing client side metrics.
18519      * </pre>
18520      */
18521     public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() {
18522       return scanMetrics_;
18523     }
18524     /**
18525      * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
18526      *
18527      * <pre>
18528      * This field is filled in if the client has requested that scan metrics be tracked.
18529      * The metrics tracked here are sent back to the client to be tracked together with 
18530      * the existing client side metrics.
18531      * </pre>
18532      */
18533     public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() {
18534       return scanMetrics_;
18535     }
18536 
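Taken together, the flag accessors above drive the client's scan loop. The following sketch is illustrative only; the handle method and the reactions described in its comments are assumptions about the surrounding client, not generated code.

    // Illustrative only: how a client might interpret one ScanResponse.
    void handle(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse resp) {
      if (resp.getHeartbeatMessage()) {
        // The server only kept the scanner alive before hitting its time limit;
        // re-issue a "next" ScanRequest with the same scanner_id.
      }
      for (int i = 0; i < resp.getResultsCount(); i++) {
        boolean partial = i < resp.getPartialFlagPerResultCount()
            && resp.getPartialFlagPerResult(i);
        // A partial Result (partial == true) must be merged with the following
        // Result(s) for the same row before it is handed to the application.
      }
      if (resp.hasMoreResultsInRegion() && !resp.getMoreResultsInRegion()) {
        // The current region is exhausted; open a scanner on the next region.
      }
      if (resp.hasMoreResults() && !resp.getMoreResults()) {
        // The scan as a whole is finished; send close_scanner = true.
      }
    }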
18537     private void initFields() {
18538       cellsPerResult_ = java.util.Collections.emptyList();
18539       scannerId_ = 0L;
18540       moreResults_ = false;
18541       ttl_ = 0;
18542       results_ = java.util.Collections.emptyList();
18543       stale_ = false;
18544       partialFlagPerResult_ = java.util.Collections.emptyList();
18545       moreResultsInRegion_ = false;
18546       heartbeatMessage_ = false;
18547       scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
18548     }
18549     private byte memoizedIsInitialized = -1;
18550     public final boolean isInitialized() {
18551       byte isInitialized = memoizedIsInitialized;
18552       if (isInitialized != -1) return isInitialized == 1;
18553 
18554       memoizedIsInitialized = 1;
18555       return true;
18556     }
18557 
18558     public void writeTo(com.google.protobuf.CodedOutputStream output)
18559                         throws java.io.IOException {
18560       getSerializedSize();
18561       for (int i = 0; i < cellsPerResult_.size(); i++) {
18562         output.writeUInt32(1, cellsPerResult_.get(i));
18563       }
18564       if (((bitField0_ & 0x00000001) == 0x00000001)) {
18565         output.writeUInt64(2, scannerId_);
18566       }
18567       if (((bitField0_ & 0x00000002) == 0x00000002)) {
18568         output.writeBool(3, moreResults_);
18569       }
18570       if (((bitField0_ & 0x00000004) == 0x00000004)) {
18571         output.writeUInt32(4, ttl_);
18572       }
18573       for (int i = 0; i < results_.size(); i++) {
18574         output.writeMessage(5, results_.get(i));
18575       }
18576       if (((bitField0_ & 0x00000008) == 0x00000008)) {
18577         output.writeBool(6, stale_);
18578       }
18579       for (int i = 0; i < partialFlagPerResult_.size(); i++) {
18580         output.writeBool(7, partialFlagPerResult_.get(i));
18581       }
18582       if (((bitField0_ & 0x00000010) == 0x00000010)) {
18583         output.writeBool(8, moreResultsInRegion_);
18584       }
18585       if (((bitField0_ & 0x00000020) == 0x00000020)) {
18586         output.writeBool(9, heartbeatMessage_);
18587       }
18588       if (((bitField0_ & 0x00000040) == 0x00000040)) {
18589         output.writeMessage(10, scanMetrics_);
18590       }
18591       getUnknownFields().writeTo(output);
18592     }
18593 
18594     private int memoizedSerializedSize = -1;
18595     public int getSerializedSize() {
18596       int size = memoizedSerializedSize;
18597       if (size != -1) return size;
18598 
18599       size = 0;
18600       {
18601         int dataSize = 0;
18602         for (int i = 0; i < cellsPerResult_.size(); i++) {
18603           dataSize += com.google.protobuf.CodedOutputStream
18604             .computeUInt32SizeNoTag(cellsPerResult_.get(i));
18605         }
18606         size += dataSize;
18607         size += 1 * getCellsPerResultList().size();
18608       }
18609       if (((bitField0_ & 0x00000001) == 0x00000001)) {
18610         size += com.google.protobuf.CodedOutputStream
18611           .computeUInt64Size(2, scannerId_);
18612       }
18613       if (((bitField0_ & 0x00000002) == 0x00000002)) {
18614         size += com.google.protobuf.CodedOutputStream
18615           .computeBoolSize(3, moreResults_);
18616       }
18617       if (((bitField0_ & 0x00000004) == 0x00000004)) {
18618         size += com.google.protobuf.CodedOutputStream
18619           .computeUInt32Size(4, ttl_);
18620       }
18621       for (int i = 0; i < results_.size(); i++) {
18622         size += com.google.protobuf.CodedOutputStream
18623           .computeMessageSize(5, results_.get(i));
18624       }
18625       if (((bitField0_ & 0x00000008) == 0x00000008)) {
18626         size += com.google.protobuf.CodedOutputStream
18627           .computeBoolSize(6, stale_);
18628       }
18629       {
18630         int dataSize = 0;
18631         dataSize = 1 * getPartialFlagPerResultList().size();
18632         size += dataSize;
18633         size += 1 * getPartialFlagPerResultList().size();
18634       }
18635       if (((bitField0_ & 0x00000010) == 0x00000010)) {
18636         size += com.google.protobuf.CodedOutputStream
18637           .computeBoolSize(8, moreResultsInRegion_);
18638       }
18639       if (((bitField0_ & 0x00000020) == 0x00000020)) {
18640         size += com.google.protobuf.CodedOutputStream
18641           .computeBoolSize(9, heartbeatMessage_);
18642       }
18643       if (((bitField0_ & 0x00000040) == 0x00000040)) {
18644         size += com.google.protobuf.CodedOutputStream
18645           .computeMessageSize(10, scanMetrics_);
18646       }
18647       size += getUnknownFields().getSerializedSize();
18648       memoizedSerializedSize = size;
18649       return size;
18650     }
18651 
18652     private static final long serialVersionUID = 0L;
18653     @java.lang.Override
18654     protected java.lang.Object writeReplace()
18655         throws java.io.ObjectStreamException {
18656       return super.writeReplace();
18657     }
18658 
18659     @java.lang.Override
18660     public boolean equals(final java.lang.Object obj) {
18661       if (obj == this) {
18662         return true;
18663       }
18664       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) {
18665         return super.equals(obj);
18666       }
18667       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj;
18668 
18669       boolean result = true;
18670       result = result && getCellsPerResultList()
18671           .equals(other.getCellsPerResultList());
18672       result = result && (hasScannerId() == other.hasScannerId());
18673       if (hasScannerId()) {
18674         result = result && (getScannerId()
18675             == other.getScannerId());
18676       }
18677       result = result && (hasMoreResults() == other.hasMoreResults());
18678       if (hasMoreResults()) {
18679         result = result && (getMoreResults()
18680             == other.getMoreResults());
18681       }
18682       result = result && (hasTtl() == other.hasTtl());
18683       if (hasTtl()) {
18684         result = result && (getTtl()
18685             == other.getTtl());
18686       }
18687       result = result && getResultsList()
18688           .equals(other.getResultsList());
18689       result = result && (hasStale() == other.hasStale());
18690       if (hasStale()) {
18691         result = result && (getStale()
18692             == other.getStale());
18693       }
18694       result = result && getPartialFlagPerResultList()
18695           .equals(other.getPartialFlagPerResultList());
18696       result = result && (hasMoreResultsInRegion() == other.hasMoreResultsInRegion());
18697       if (hasMoreResultsInRegion()) {
18698         result = result && (getMoreResultsInRegion()
18699             == other.getMoreResultsInRegion());
18700       }
18701       result = result && (hasHeartbeatMessage() == other.hasHeartbeatMessage());
18702       if (hasHeartbeatMessage()) {
18703         result = result && (getHeartbeatMessage()
18704             == other.getHeartbeatMessage());
18705       }
18706       result = result && (hasScanMetrics() == other.hasScanMetrics());
18707       if (hasScanMetrics()) {
18708         result = result && getScanMetrics()
18709             .equals(other.getScanMetrics());
18710       }
18711       result = result &&
18712           getUnknownFields().equals(other.getUnknownFields());
18713       return result;
18714     }
18715 
18716     private int memoizedHashCode = 0;
18717     @java.lang.Override
18718     public int hashCode() {
18719       if (memoizedHashCode != 0) {
18720         return memoizedHashCode;
18721       }
18722       int hash = 41;
18723       hash = (19 * hash) + getDescriptorForType().hashCode();
18724       if (getCellsPerResultCount() > 0) {
18725         hash = (37 * hash) + CELLS_PER_RESULT_FIELD_NUMBER;
18726         hash = (53 * hash) + getCellsPerResultList().hashCode();
18727       }
18728       if (hasScannerId()) {
18729         hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
18730         hash = (53 * hash) + hashLong(getScannerId());
18731       }
18732       if (hasMoreResults()) {
18733         hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER;
18734         hash = (53 * hash) + hashBoolean(getMoreResults());
18735       }
18736       if (hasTtl()) {
18737         hash = (37 * hash) + TTL_FIELD_NUMBER;
18738         hash = (53 * hash) + getTtl();
18739       }
18740       if (getResultsCount() > 0) {
18741         hash = (37 * hash) + RESULTS_FIELD_NUMBER;
18742         hash = (53 * hash) + getResultsList().hashCode();
18743       }
18744       if (hasStale()) {
18745         hash = (37 * hash) + STALE_FIELD_NUMBER;
18746         hash = (53 * hash) + hashBoolean(getStale());
18747       }
18748       if (getPartialFlagPerResultCount() > 0) {
18749         hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER;
18750         hash = (53 * hash) + getPartialFlagPerResultList().hashCode();
18751       }
18752       if (hasMoreResultsInRegion()) {
18753         hash = (37 * hash) + MORE_RESULTS_IN_REGION_FIELD_NUMBER;
18754         hash = (53 * hash) + hashBoolean(getMoreResultsInRegion());
18755       }
18756       if (hasHeartbeatMessage()) {
18757         hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER;
18758         hash = (53 * hash) + hashBoolean(getHeartbeatMessage());
18759       }
18760       if (hasScanMetrics()) {
18761         hash = (37 * hash) + SCAN_METRICS_FIELD_NUMBER;
18762         hash = (53 * hash) + getScanMetrics().hashCode();
18763       }
18764       hash = (29 * hash) + getUnknownFields().hashCode();
18765       memoizedHashCode = hash;
18766       return hash;
18767     }
18768 
18769     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
18770         com.google.protobuf.ByteString data)
18771         throws com.google.protobuf.InvalidProtocolBufferException {
18772       return PARSER.parseFrom(data);
18773     }
18774     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
18775         com.google.protobuf.ByteString data,
18776         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18777         throws com.google.protobuf.InvalidProtocolBufferException {
18778       return PARSER.parseFrom(data, extensionRegistry);
18779     }
18780     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data)
18781         throws com.google.protobuf.InvalidProtocolBufferException {
18782       return PARSER.parseFrom(data);
18783     }
18784     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
18785         byte[] data,
18786         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18787         throws com.google.protobuf.InvalidProtocolBufferException {
18788       return PARSER.parseFrom(data, extensionRegistry);
18789     }
18790     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input)
18791         throws java.io.IOException {
18792       return PARSER.parseFrom(input);
18793     }
18794     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
18795         java.io.InputStream input,
18796         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18797         throws java.io.IOException {
18798       return PARSER.parseFrom(input, extensionRegistry);
18799     }
18800     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input)
18801         throws java.io.IOException {
18802       return PARSER.parseDelimitedFrom(input);
18803     }
18804     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(
18805         java.io.InputStream input,
18806         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18807         throws java.io.IOException {
18808       return PARSER.parseDelimitedFrom(input, extensionRegistry);
18809     }
18810     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
18811         com.google.protobuf.CodedInputStream input)
18812         throws java.io.IOException {
18813       return PARSER.parseFrom(input);
18814     }
18815     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
18816         com.google.protobuf.CodedInputStream input,
18817         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18818         throws java.io.IOException {
18819       return PARSER.parseFrom(input, extensionRegistry);
18820     }
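    // Illustrative sketch only, not part of the generated file: a minimal example of
    // deserializing a ScanResponse with the parseFrom overloads above and checking the
    // more_results flag. The method name and the byte[] source are hypothetical.
    static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse exampleParse(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse resp =
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.parseFrom(data);
      // An explicit more_results == false means the scan is exhausted; if the field is
      // unset, the server is signalling that more results may still follow.
      if (resp.hasMoreResults() && !resp.getMoreResults()) {
        // a caller would close the scanner here
      }
      return resp;
    }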
18821 
18822     public static Builder newBuilder() { return Builder.create(); }
18823     public Builder newBuilderForType() { return newBuilder(); }
18824     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) {
18825       return newBuilder().mergeFrom(prototype);
18826     }
18827     public Builder toBuilder() { return newBuilder(this); }
18828 
18829     @java.lang.Override
18830     protected Builder newBuilderForType(
18831         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18832       Builder builder = new Builder(parent);
18833       return builder;
18834     }
18835     /**
18836      * Protobuf type {@code hbase.pb.ScanResponse}
18837      *
18838      * <pre>
18839      **
18840      * The scan response. If there are no more results, more_results will
18841      * be false.  If it is not specified, it means there are more.
18842      * </pre>
18843      */
18844     public static final class Builder extends
18845         com.google.protobuf.GeneratedMessage.Builder<Builder>
18846        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder {
18847       public static final com.google.protobuf.Descriptors.Descriptor
18848           getDescriptor() {
18849         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor;
18850       }
18851 
18852       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
18853           internalGetFieldAccessorTable() {
18854         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable
18855             .ensureFieldAccessorsInitialized(
18856                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
18857       }
18858 
18859       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder()
18860       private Builder() {
18861         maybeForceBuilderInitialization();
18862       }
18863 
18864       private Builder(
18865           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18866         super(parent);
18867         maybeForceBuilderInitialization();
18868       }
18869       private void maybeForceBuilderInitialization() {
18870         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18871           getResultsFieldBuilder();
18872           getScanMetricsFieldBuilder();
18873         }
18874       }
18875       private static Builder create() {
18876         return new Builder();
18877       }
18878 
18879       public Builder clear() {
18880         super.clear();
18881         cellsPerResult_ = java.util.Collections.emptyList();
18882         bitField0_ = (bitField0_ & ~0x00000001);
18883         scannerId_ = 0L;
18884         bitField0_ = (bitField0_ & ~0x00000002);
18885         moreResults_ = false;
18886         bitField0_ = (bitField0_ & ~0x00000004);
18887         ttl_ = 0;
18888         bitField0_ = (bitField0_ & ~0x00000008);
18889         if (resultsBuilder_ == null) {
18890           results_ = java.util.Collections.emptyList();
18891           bitField0_ = (bitField0_ & ~0x00000010);
18892         } else {
18893           resultsBuilder_.clear();
18894         }
18895         stale_ = false;
18896         bitField0_ = (bitField0_ & ~0x00000020);
18897         partialFlagPerResult_ = java.util.Collections.emptyList();
18898         bitField0_ = (bitField0_ & ~0x00000040);
18899         moreResultsInRegion_ = false;
18900         bitField0_ = (bitField0_ & ~0x00000080);
18901         heartbeatMessage_ = false;
18902         bitField0_ = (bitField0_ & ~0x00000100);
18903         if (scanMetricsBuilder_ == null) {
18904           scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
18905         } else {
18906           scanMetricsBuilder_.clear();
18907         }
18908         bitField0_ = (bitField0_ & ~0x00000200);
18909         return this;
18910       }
18911 
18912       public Builder clone() {
18913         return create().mergeFrom(buildPartial());
18914       }
18915 
18916       public com.google.protobuf.Descriptors.Descriptor
18917           getDescriptorForType() {
18918         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor;
18919       }
18920 
18921       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() {
18922         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
18923       }
18924 
18925       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() {
18926         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial();
18927         if (!result.isInitialized()) {
18928           throw newUninitializedMessageException(result);
18929         }
18930         return result;
18931       }
18932 
18933       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() {
18934         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this);
18935         int from_bitField0_ = bitField0_;
18936         int to_bitField0_ = 0;
18937         if (((bitField0_ & 0x00000001) == 0x00000001)) {
18938           cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
18939           bitField0_ = (bitField0_ & ~0x00000001);
18940         }
18941         result.cellsPerResult_ = cellsPerResult_;
18942         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
18943           to_bitField0_ |= 0x00000001;
18944         }
18945         result.scannerId_ = scannerId_;
18946         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
18947           to_bitField0_ |= 0x00000002;
18948         }
18949         result.moreResults_ = moreResults_;
18950         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
18951           to_bitField0_ |= 0x00000004;
18952         }
18953         result.ttl_ = ttl_;
18954         if (resultsBuilder_ == null) {
18955           if (((bitField0_ & 0x00000010) == 0x00000010)) {
18956             results_ = java.util.Collections.unmodifiableList(results_);
18957             bitField0_ = (bitField0_ & ~0x00000010);
18958           }
18959           result.results_ = results_;
18960         } else {
18961           result.results_ = resultsBuilder_.build();
18962         }
18963         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
18964           to_bitField0_ |= 0x00000008;
18965         }
18966         result.stale_ = stale_;
18967         if (((bitField0_ & 0x00000040) == 0x00000040)) {
18968           partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
18969           bitField0_ = (bitField0_ & ~0x00000040);
18970         }
18971         result.partialFlagPerResult_ = partialFlagPerResult_;
18972         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
18973           to_bitField0_ |= 0x00000010;
18974         }
18975         result.moreResultsInRegion_ = moreResultsInRegion_;
18976         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
18977           to_bitField0_ |= 0x00000020;
18978         }
18979         result.heartbeatMessage_ = heartbeatMessage_;
18980         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
18981           to_bitField0_ |= 0x00000040;
18982         }
18983         if (scanMetricsBuilder_ == null) {
18984           result.scanMetrics_ = scanMetrics_;
18985         } else {
18986           result.scanMetrics_ = scanMetricsBuilder_.build();
18987         }
18988         result.bitField0_ = to_bitField0_;
18989         onBuilt();
18990         return result;
18991       }
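      // Illustrative sketch only, not part of the generated file: assembling a
      // ScanResponse through this Builder. All field values below are hypothetical.
      static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse exampleBuild() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder()
            .setScannerId(42L)              // id the client uses on follow-up scan calls
            .setMoreResults(true)           // more rows remain for this scan overall
            .setMoreResultsInRegion(false)  // but the current region has been exhausted
            .addCellsPerResult(3)           // one Result of three Cells in the cellblock
            .addPartialFlagPerResult(false) // and it is a complete (non-partial) Result
            .build();
      }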
18992 
18993       public Builder mergeFrom(com.google.protobuf.Message other) {
18994         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) {
18995           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other);
18996         } else {
18997           super.mergeFrom(other);
18998           return this;
18999         }
19000       }
19001 
19002       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) {
19003         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this;
19004         if (!other.cellsPerResult_.isEmpty()) {
19005           if (cellsPerResult_.isEmpty()) {
19006             cellsPerResult_ = other.cellsPerResult_;
19007             bitField0_ = (bitField0_ & ~0x00000001);
19008           } else {
19009             ensureCellsPerResultIsMutable();
19010             cellsPerResult_.addAll(other.cellsPerResult_);
19011           }
19012           onChanged();
19013         }
19014         if (other.hasScannerId()) {
19015           setScannerId(other.getScannerId());
19016         }
19017         if (other.hasMoreResults()) {
19018           setMoreResults(other.getMoreResults());
19019         }
19020         if (other.hasTtl()) {
19021           setTtl(other.getTtl());
19022         }
19023         if (resultsBuilder_ == null) {
19024           if (!other.results_.isEmpty()) {
19025             if (results_.isEmpty()) {
19026               results_ = other.results_;
19027               bitField0_ = (bitField0_ & ~0x00000010);
19028             } else {
19029               ensureResultsIsMutable();
19030               results_.addAll(other.results_);
19031             }
19032             onChanged();
19033           }
19034         } else {
19035           if (!other.results_.isEmpty()) {
19036             if (resultsBuilder_.isEmpty()) {
19037               resultsBuilder_.dispose();
19038               resultsBuilder_ = null;
19039               results_ = other.results_;
19040               bitField0_ = (bitField0_ & ~0x00000010);
19041               resultsBuilder_ = 
19042                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
19043                    getResultsFieldBuilder() : null;
19044             } else {
19045               resultsBuilder_.addAllMessages(other.results_);
19046             }
19047           }
19048         }
19049         if (other.hasStale()) {
19050           setStale(other.getStale());
19051         }
19052         if (!other.partialFlagPerResult_.isEmpty()) {
19053           if (partialFlagPerResult_.isEmpty()) {
19054             partialFlagPerResult_ = other.partialFlagPerResult_;
19055             bitField0_ = (bitField0_ & ~0x00000040);
19056           } else {
19057             ensurePartialFlagPerResultIsMutable();
19058             partialFlagPerResult_.addAll(other.partialFlagPerResult_);
19059           }
19060           onChanged();
19061         }
19062         if (other.hasMoreResultsInRegion()) {
19063           setMoreResultsInRegion(other.getMoreResultsInRegion());
19064         }
19065         if (other.hasHeartbeatMessage()) {
19066           setHeartbeatMessage(other.getHeartbeatMessage());
19067         }
19068         if (other.hasScanMetrics()) {
19069           mergeScanMetrics(other.getScanMetrics());
19070         }
19071         this.mergeUnknownFields(other.getUnknownFields());
19072         return this;
19073       }
19074 
19075       public final boolean isInitialized() {
19076         return true;
19077       }
19078 
19079       public Builder mergeFrom(
19080           com.google.protobuf.CodedInputStream input,
19081           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19082           throws java.io.IOException {
19083         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parsedMessage = null;
19084         try {
19085           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
19086         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19087           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage();
19088           throw e;
19089         } finally {
19090           if (parsedMessage != null) {
19091             mergeFrom(parsedMessage);
19092           }
19093         }
19094         return this;
19095       }
19096       private int bitField0_;
19097 
19098       // repeated uint32 cells_per_result = 1;
19099       private java.util.List<java.lang.Integer> cellsPerResult_ = java.util.Collections.emptyList();
19100       private void ensureCellsPerResultIsMutable() {
19101         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
19102           cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(cellsPerResult_);
19103           bitField0_ |= 0x00000001;
19104          }
19105       }
19106       /**
19107        * <code>repeated uint32 cells_per_result = 1;</code>
19108        *
19109        * <pre>
19110        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19111        * of all Cells serialized out as one cellblock BUT responses from a server
19112        * have their Cells grouped by Result.  So we can reconstitute the
19113        * Results on the client-side, this field is a list of counts of Cells
19114        * in each Result that makes up the response.  For example, if this field
19115        * has 3, 3, 3 in it, then we know that on the client, we are to make
19116        * three Results each of three Cells each.
19117        * </pre>
19118        */
19119       public java.util.List<java.lang.Integer>
19120           getCellsPerResultList() {
19121         return java.util.Collections.unmodifiableList(cellsPerResult_);
19122       }
19123       /**
19124        * <code>repeated uint32 cells_per_result = 1;</code>
19125        *
19126        * <pre>
19127        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19128        * of all Cells serialized out as one cellblock BUT responses from a server
19129        * have their Cells grouped by Result.  So we can reconstitute the
19130        * Results on the client-side, this field is a list of counts of Cells
19131        * in each Result that makes up the response.  For example, if this field
19132        * has 3, 3, 3 in it, then we know that on the client, we are to make
19133        * three Results each of three Cells each.
19134        * </pre>
19135        */
19136       public int getCellsPerResultCount() {
19137         return cellsPerResult_.size();
19138       }
19139       /**
19140        * <code>repeated uint32 cells_per_result = 1;</code>
19141        *
19142        * <pre>
19143        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19144        * of all Cells serialized out as one cellblock BUT responses from a server
19145        * have their Cells grouped by Result.  So we can reconstitute the
19146        * Results on the client-side, this field is a list of counts of Cells
19147        * in each Result that makes up the response.  For example, if this field
19148        * has 3, 3, 3 in it, then we know that on the client, we are to make
19149        * three Results each of three Cells each.
19150        * </pre>
19151        */
19152       public int getCellsPerResult(int index) {
19153         return cellsPerResult_.get(index);
19154       }
19155       /**
19156        * <code>repeated uint32 cells_per_result = 1;</code>
19157        *
19158        * <pre>
19159        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19160        * of all Cells serialized out as one cellblock BUT responses from a server
19161        * have their Cells grouped by Result.  So we can reconstitute the
19162        * Results on the client-side, this field is a list of counts of Cells
19163        * in each Result that makes up the response.  For example, if this field
19164        * has 3, 3, 3 in it, then we know that on the client, we are to make
19165        * three Results each of three Cells each.
19166        * </pre>
19167        */
19168       public Builder setCellsPerResult(
19169           int index, int value) {
19170         ensureCellsPerResultIsMutable();
19171         cellsPerResult_.set(index, value);
19172         onChanged();
19173         return this;
19174       }
19175       /**
19176        * <code>repeated uint32 cells_per_result = 1;</code>
19177        *
19178        * <pre>
19179        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19180        * of all Cells serialized out as one cellblock BUT responses from a server
19181        * have their Cells grouped by Result.  So we can reconstitute the
19182        * Results on the client-side, this field is a list of counts of Cells
19183        * in each Result that makes up the response.  For example, if this field
19184        * has 3, 3, 3 in it, then we know that on the client, we are to make
19185        * three Results each of three Cells each.
19186        * </pre>
19187        */
19188       public Builder addCellsPerResult(int value) {
19189         ensureCellsPerResultIsMutable();
19190         cellsPerResult_.add(value);
19191         onChanged();
19192         return this;
19193       }
19194       /**
19195        * <code>repeated uint32 cells_per_result = 1;</code>
19196        *
19197        * <pre>
19198        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19199        * of all Cells serialized out as one cellblock BUT responses from a server
19200        * have their Cells grouped by Result.  So we can reconstitute the
19201        * Results on the client-side, this field is a list of counts of Cells
19202        * in each Result that makes up the response.  For example, if this field
19203        * has 3, 3, 3 in it, then we know that on the client, we are to make
19204        * three Results each of three Cells each.
19205        * </pre>
19206        */
19207       public Builder addAllCellsPerResult(
19208           java.lang.Iterable<? extends java.lang.Integer> values) {
19209         ensureCellsPerResultIsMutable();
19210         super.addAll(values, cellsPerResult_);
19211         onChanged();
19212         return this;
19213       }
19214       /**
19215        * <code>repeated uint32 cells_per_result = 1;</code>
19216        *
19217        * <pre>
19218        * This field is filled in if we are doing cellblocks.  A cellblock is made up
19219        * of all Cells serialized out as one cellblock BUT responses from a server
19220        * have their Cells grouped by Result.  So we can reconstitute the
19221        * Results on the client-side, this field is a list of counts of Cells
19222        * in each Result that makes up the response.  For example, if this field
19223        * has 3, 3, 3 in it, then we know that on the client, we are to make
19224        * three Results each of three Cells each.
19225        * </pre>
19226        */
19227       public Builder clearCellsPerResult() {
19228         cellsPerResult_ = java.util.Collections.emptyList();
19229         bitField0_ = (bitField0_ & ~0x00000001);
19230         onChanged();
19231         return this;
19232       }
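      // Illustrative sketch only, not part of the generated file: how the counts in
      // cells_per_result are meant to be consumed on the client side. Given counts
      // [3, 3, 3] and nine Cells in the accompanying cellblock, the client rebuilds
      // three Results of three Cells each; this helper just walks the offsets.
      static void exampleWalkCellCounts(java.util.List<java.lang.Integer> cellsPerResult) {
        int offset = 0;
        for (int count : cellsPerResult) {
          // Cells [offset, offset + count) in the cellblock form one Result.
          offset += count;
        }
      }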
19233 
19234       // optional uint64 scanner_id = 2;
19235       private long scannerId_ ;
19236       /**
19237        * <code>optional uint64 scanner_id = 2;</code>
19238        */
19239       public boolean hasScannerId() {
19240         return ((bitField0_ & 0x00000002) == 0x00000002);
19241       }
19242       /**
19243        * <code>optional uint64 scanner_id = 2;</code>
19244        */
19245       public long getScannerId() {
19246         return scannerId_;
19247       }
19248       /**
19249        * <code>optional uint64 scanner_id = 2;</code>
19250        */
19251       public Builder setScannerId(long value) {
19252         bitField0_ |= 0x00000002;
19253         scannerId_ = value;
19254         onChanged();
19255         return this;
19256       }
19257       /**
19258        * <code>optional uint64 scanner_id = 2;</code>
19259        */
19260       public Builder clearScannerId() {
19261         bitField0_ = (bitField0_ & ~0x00000002);
19262         scannerId_ = 0L;
19263         onChanged();
19264         return this;
19265       }
19266 
19267       // optional bool more_results = 3;
19268       private boolean moreResults_ ;
19269       /**
19270        * <code>optional bool more_results = 3;</code>
19271        */
19272       public boolean hasMoreResults() {
19273         return ((bitField0_ & 0x00000004) == 0x00000004);
19274       }
19275       /**
19276        * <code>optional bool more_results = 3;</code>
19277        */
19278       public boolean getMoreResults() {
19279         return moreResults_;
19280       }
19281       /**
19282        * <code>optional bool more_results = 3;</code>
19283        */
19284       public Builder setMoreResults(boolean value) {
19285         bitField0_ |= 0x00000004;
19286         moreResults_ = value;
19287         onChanged();
19288         return this;
19289       }
19290       /**
19291        * <code>optional bool more_results = 3;</code>
19292        */
19293       public Builder clearMoreResults() {
19294         bitField0_ = (bitField0_ & ~0x00000004);
19295         moreResults_ = false;
19296         onChanged();
19297         return this;
19298       }
19299 
19300       // optional uint32 ttl = 4;
19301       private int ttl_ ;
19302       /**
19303        * <code>optional uint32 ttl = 4;</code>
19304        */
19305       public boolean hasTtl() {
19306         return ((bitField0_ & 0x00000008) == 0x00000008);
19307       }
19308       /**
19309        * <code>optional uint32 ttl = 4;</code>
19310        */
19311       public int getTtl() {
19312         return ttl_;
19313       }
19314       /**
19315        * <code>optional uint32 ttl = 4;</code>
19316        */
19317       public Builder setTtl(int value) {
19318         bitField0_ |= 0x00000008;
19319         ttl_ = value;
19320         onChanged();
19321         return this;
19322       }
19323       /**
19324        * <code>optional uint32 ttl = 4;</code>
19325        */
19326       public Builder clearTtl() {
19327         bitField0_ = (bitField0_ & ~0x00000008);
19328         ttl_ = 0;
19329         onChanged();
19330         return this;
19331       }
19332 
19333       // repeated .hbase.pb.Result results = 5;
19334       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_ =
19335         java.util.Collections.emptyList();
19336       private void ensureResultsIsMutable() {
19337         if (!((bitField0_ & 0x00000010) == 0x00000010)) {
19338           results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>(results_);
19339           bitField0_ |= 0x00000010;
19340          }
19341       }
19342 
19343       private com.google.protobuf.RepeatedFieldBuilder<
19344           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultsBuilder_;
19345 
19346       /**
19347        * <code>repeated .hbase.pb.Result results = 5;</code>
19348        *
19349        * <pre>
19350        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19351        * This field is mutually exclusive with cells_per_result (since the Cells will
19352        * be inside the pb'd Result)
19353        * </pre>
19354        */
19355       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
19356         if (resultsBuilder_ == null) {
19357           return java.util.Collections.unmodifiableList(results_);
19358         } else {
19359           return resultsBuilder_.getMessageList();
19360         }
19361       }
19362       /**
19363        * <code>repeated .hbase.pb.Result results = 5;</code>
19364        *
19365        * <pre>
19366        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19367        * This field is mutually exclusive with cells_per_result (since the Cells will
19368        * be inside the pb'd Result)
19369        * </pre>
19370        */
19371       public int getResultsCount() {
19372         if (resultsBuilder_ == null) {
19373           return results_.size();
19374         } else {
19375           return resultsBuilder_.getCount();
19376         }
19377       }
19378       /**
19379        * <code>repeated .hbase.pb.Result results = 5;</code>
19380        *
19381        * <pre>
19382        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19383        * This field is mutually exclusive with cells_per_result (since the Cells will
19384        * be inside the pb'd Result)
19385        * </pre>
19386        */
19387       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
19388         if (resultsBuilder_ == null) {
19389           return results_.get(index);
19390         } else {
19391           return resultsBuilder_.getMessage(index);
19392         }
19393       }
19394       /**
19395        * <code>repeated .hbase.pb.Result results = 5;</code>
19396        *
19397        * <pre>
19398        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19399        * This field is mutually exclusive with cells_per_result (since the Cells will
19400        * be inside the pb'd Result)
19401        * </pre>
19402        */
19403       public Builder setResults(
19404           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
19405         if (resultsBuilder_ == null) {
19406           if (value == null) {
19407             throw new NullPointerException();
19408           }
19409           ensureResultsIsMutable();
19410           results_.set(index, value);
19411           onChanged();
19412         } else {
19413           resultsBuilder_.setMessage(index, value);
19414         }
19415         return this;
19416       }
19417       /**
19418        * <code>repeated .hbase.pb.Result results = 5;</code>
19419        *
19420        * <pre>
19421        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19422        * This field is mutually exclusive with cells_per_result (since the Cells will
19423        * be inside the pb'd Result)
19424        * </pre>
19425        */
19426       public Builder setResults(
19427           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
19428         if (resultsBuilder_ == null) {
19429           ensureResultsIsMutable();
19430           results_.set(index, builderForValue.build());
19431           onChanged();
19432         } else {
19433           resultsBuilder_.setMessage(index, builderForValue.build());
19434         }
19435         return this;
19436       }
19437       /**
19438        * <code>repeated .hbase.pb.Result results = 5;</code>
19439        *
19440        * <pre>
19441        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19442        * This field is mutually exclusive with cells_per_result (since the Cells will
19443        * be inside the pb'd Result)
19444        * </pre>
19445        */
19446       public Builder addResults(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
19447         if (resultsBuilder_ == null) {
19448           if (value == null) {
19449             throw new NullPointerException();
19450           }
19451           ensureResultsIsMutable();
19452           results_.add(value);
19453           onChanged();
19454         } else {
19455           resultsBuilder_.addMessage(value);
19456         }
19457         return this;
19458       }
19459       /**
19460        * <code>repeated .hbase.pb.Result results = 5;</code>
19461        *
19462        * <pre>
19463        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19464        * This field is mutually exclusive with cells_per_result (since the Cells will
19465        * be inside the pb'd Result)
19466        * </pre>
19467        */
19468       public Builder addResults(
19469           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
19470         if (resultsBuilder_ == null) {
19471           if (value == null) {
19472             throw new NullPointerException();
19473           }
19474           ensureResultsIsMutable();
19475           results_.add(index, value);
19476           onChanged();
19477         } else {
19478           resultsBuilder_.addMessage(index, value);
19479         }
19480         return this;
19481       }
19482       /**
19483        * <code>repeated .hbase.pb.Result results = 5;</code>
19484        *
19485        * <pre>
19486        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19487        * This field is mutually exclusive with cells_per_result (since the Cells will
19488        * be inside the pb'd Result)
19489        * </pre>
19490        */
19491       public Builder addResults(
19492           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
19493         if (resultsBuilder_ == null) {
19494           ensureResultsIsMutable();
19495           results_.add(builderForValue.build());
19496           onChanged();
19497         } else {
19498           resultsBuilder_.addMessage(builderForValue.build());
19499         }
19500         return this;
19501       }
19502       /**
19503        * <code>repeated .hbase.pb.Result results = 5;</code>
19504        *
19505        * <pre>
19506        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19507        * This field is mutually exclusive with cells_per_result (since the Cells will
19508        * be inside the pb'd Result)
19509        * </pre>
19510        */
19511       public Builder addResults(
19512           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
19513         if (resultsBuilder_ == null) {
19514           ensureResultsIsMutable();
19515           results_.add(index, builderForValue.build());
19516           onChanged();
19517         } else {
19518           resultsBuilder_.addMessage(index, builderForValue.build());
19519         }
19520         return this;
19521       }
19522       /**
19523        * <code>repeated .hbase.pb.Result results = 5;</code>
19524        *
19525        * <pre>
19526        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19527        * This field is mutually exclusive with cells_per_result (since the Cells will
19528        * be inside the pb'd Result)
19529        * </pre>
19530        */
19531       public Builder addAllResults(
19532           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> values) {
19533         if (resultsBuilder_ == null) {
19534           ensureResultsIsMutable();
19535           super.addAll(values, results_);
19536           onChanged();
19537         } else {
19538           resultsBuilder_.addAllMessages(values);
19539         }
19540         return this;
19541       }
19542       /**
19543        * <code>repeated .hbase.pb.Result results = 5;</code>
19544        *
19545        * <pre>
19546        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19547        * This field is mutually exclusive with cells_per_result (since the Cells will
19548        * be inside the pb'd Result)
19549        * </pre>
19550        */
19551       public Builder clearResults() {
19552         if (resultsBuilder_ == null) {
19553           results_ = java.util.Collections.emptyList();
19554           bitField0_ = (bitField0_ & ~0x00000010);
19555           onChanged();
19556         } else {
19557           resultsBuilder_.clear();
19558         }
19559         return this;
19560       }
19561       /**
19562        * <code>repeated .hbase.pb.Result results = 5;</code>
19563        *
19564        * <pre>
19565        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19566        * This field is mutually exclusive with cells_per_result (since the Cells will
19567        * be inside the pb'd Result)
19568        * </pre>
19569        */
19570       public Builder removeResults(int index) {
19571         if (resultsBuilder_ == null) {
19572           ensureResultsIsMutable();
19573           results_.remove(index);
19574           onChanged();
19575         } else {
19576           resultsBuilder_.remove(index);
19577         }
19578         return this;
19579       }
19580       /**
19581        * <code>repeated .hbase.pb.Result results = 5;</code>
19582        *
19583        * <pre>
19584        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19585        * This field is mutually exclusive with cells_per_result (since the Cells will
19586        * be inside the pb'd Result)
19587        * </pre>
19588        */
19589       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultsBuilder(
19590           int index) {
19591         return getResultsFieldBuilder().getBuilder(index);
19592       }
19593       /**
19594        * <code>repeated .hbase.pb.Result results = 5;</code>
19595        *
19596        * <pre>
19597        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19598        * This field is mutually exclusive with cells_per_result (since the Cells will
19599        * be inside the pb'd Result)
19600        * </pre>
19601        */
19602       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
19603           int index) {
19604         if (resultsBuilder_ == null) {
19605           return results_.get(index);  } else {
19606           return resultsBuilder_.getMessageOrBuilder(index);
19607         }
19608       }
19609       /**
19610        * <code>repeated .hbase.pb.Result results = 5;</code>
19611        *
19612        * <pre>
19613        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19614        * This field is mutually exclusive with cells_per_result (since the Cells will
19615        * be inside the pb'd Result)
19616        * </pre>
19617        */
19618       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
19619            getResultsOrBuilderList() {
19620         if (resultsBuilder_ != null) {
19621           return resultsBuilder_.getMessageOrBuilderList();
19622         } else {
19623           return java.util.Collections.unmodifiableList(results_);
19624         }
19625       }
19626       /**
19627        * <code>repeated .hbase.pb.Result results = 5;</code>
19628        *
19629        * <pre>
19630        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19631        * This field is mutually exclusive with cells_per_result (since the Cells will
19632        * be inside the pb'd Result)
19633        * </pre>
19634        */
19635       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder() {
19636         return getResultsFieldBuilder().addBuilder(
19637             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
19638       }
19639       /**
19640        * <code>repeated .hbase.pb.Result results = 5;</code>
19641        *
19642        * <pre>
19643        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19644        * This field is mutually exclusive with cells_per_result (since the Cells will
19645        * be inside the pb'd Result)
19646        * </pre>
19647        */
19648       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder(
19649           int index) {
19650         return getResultsFieldBuilder().addBuilder(
19651             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
19652       }
19653       /**
19654        * <code>repeated .hbase.pb.Result results = 5;</code>
19655        *
19656        * <pre>
19657        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19658        * This field is mutually exclusive with cells_per_result (since the Cells will
19659        * be inside the pb'd Result)
19660        * </pre>
19661        */
19662       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder> 
19663            getResultsBuilderList() {
19664         return getResultsFieldBuilder().getBuilderList();
19665       }
19666       private com.google.protobuf.RepeatedFieldBuilder<
19667           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
19668           getResultsFieldBuilder() {
19669         if (resultsBuilder_ == null) {
19670           resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
19671               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
19672                   results_,
19673                   ((bitField0_ & 0x00000010) == 0x00000010),
19674                   getParentForChildren(),
19675                   isClean());
19676           results_ = null;
19677         }
19678         return resultsBuilder_;
19679       }
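      // Illustrative sketch only, not part of the generated file: results and
      // cells_per_result are mutually exclusive per the comments above, so a client
      // can branch on which one the response carries.
      static boolean exampleCellsAreInline(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse resp) {
        // Non-empty results => Cells are pb'd inline in the response; non-empty
        // cells_per_result => Cells travel in the cellblock and must be regrouped.
        return resp.getResultsCount() > 0 && resp.getCellsPerResultCount() == 0;
      }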
19680 
19681       // optional bool stale = 6;
19682       private boolean stale_ ;
19683       /**
19684        * <code>optional bool stale = 6;</code>
19685        */
19686       public boolean hasStale() {
19687         return ((bitField0_ & 0x00000020) == 0x00000020);
19688       }
19689       /**
19690        * <code>optional bool stale = 6;</code>
19691        */
19692       public boolean getStale() {
19693         return stale_;
19694       }
19695       /**
19696        * <code>optional bool stale = 6;</code>
19697        */
19698       public Builder setStale(boolean value) {
19699         bitField0_ |= 0x00000020;
19700         stale_ = value;
19701         onChanged();
19702         return this;
19703       }
19704       /**
19705        * <code>optional bool stale = 6;</code>
19706        */
19707       public Builder clearStale() {
19708         bitField0_ = (bitField0_ & ~0x00000020);
19709         stale_ = false;
19710         onChanged();
19711         return this;
19712       }
19713 
19714       // repeated bool partial_flag_per_result = 7;
19715       private java.util.List<java.lang.Boolean> partialFlagPerResult_ = java.util.Collections.emptyList();
19716       private void ensurePartialFlagPerResultIsMutable() {
19717         if (!((bitField0_ & 0x00000040) == 0x00000040)) {
19718           partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(partialFlagPerResult_);
19719           bitField0_ |= 0x00000040;
19720          }
19721       }
19722       /**
19723        * <code>repeated bool partial_flag_per_result = 7;</code>
19724        *
19725        * <pre>
19726        * This field is filled in if we are doing cellblocks. In the event that a row
19727        * could not fit all of its cells into a single RPC chunk, the results will be
19728        * returned as partials, and reconstructed into a complete result on the client
19729        * side. This field is a list of flags indicating whether or not the result
19730        * that the cells belong to is a partial result. For example, if this field
19731        * has false, false, true in it, then we know that on the client side, we need to
19732        * make another RPC request since the last result was only a partial.
19733        * </pre>
19734        */
19735       public java.util.List<java.lang.Boolean>
19736           getPartialFlagPerResultList() {
19737         return java.util.Collections.unmodifiableList(partialFlagPerResult_);
19738       }
19739       /**
19740        * <code>repeated bool partial_flag_per_result = 7;</code>
19741        *
19742        * <pre>
19743        * This field is filled in if we are doing cellblocks. In the event that a row
19744        * could not fit all of its cells into a single RPC chunk, the results will be
19745        * returned as partials, and reconstructed into a complete result on the client
19746        * side. This field is a list of flags indicating whether or not the result
19747        * that the cells belong to is a partial result. For example, if this field
19748        * has false, false, true in it, then we know that on the client side, we need to
19749        * make another RPC request since the last result was only a partial.
19750        * </pre>
19751        */
19752       public int getPartialFlagPerResultCount() {
19753         return partialFlagPerResult_.size();
19754       }
19755       /**
19756        * <code>repeated bool partial_flag_per_result = 7;</code>
19757        *
19758        * <pre>
19759        * This field is filled in if we are doing cellblocks. In the event that a row
19760        * could not fit all of its cells into a single RPC chunk, the results will be
19761        * returned as partials, and reconstructed into a complete result on the client
19762        * side. This field is a list of flags indicating whether or not the result
19763        * that the cells belong to is a partial result. For example, if this field
19764        * has false, false, true in it, then we know that on the client side, we need to
19765        * make another RPC request since the last result was only a partial.
19766        * </pre>
19767        */
19768       public boolean getPartialFlagPerResult(int index) {
19769         return partialFlagPerResult_.get(index);
19770       }
19771       /**
19772        * <code>repeated bool partial_flag_per_result = 7;</code>
19773        *
19774        * <pre>
19775        * This field is filled in if we are doing cellblocks. In the event that a row
19776        * could not fit all of its cells into a single RPC chunk, the results will be
19777        * returned as partials, and reconstructed into a complete result on the client
19778        * side. This field is a list of flags indicating whether or not the result
19779        * that the cells belong to is a partial result. For example, if this field
19780        * has false, false, true in it, then we know that on the client side, we need to
19781        * make another RPC request since the last result was only a partial.
19782        * </pre>
19783        */
19784       public Builder setPartialFlagPerResult(
19785           int index, boolean value) {
19786         ensurePartialFlagPerResultIsMutable();
19787         partialFlagPerResult_.set(index, value);
19788         onChanged();
19789         return this;
19790       }
19791       /**
19792        * <code>repeated bool partial_flag_per_result = 7;</code>
19793        *
19794        * <pre>
19795        * This field is filled in if we are doing cellblocks. In the event that a row
19796        * could not fit all of its cells into a single RPC chunk, the results will be
19797        * returned as partials, and reconstructed into a complete result on the client
19798        * side. This field is a list of flags indicating whether or not the result
19799        * that the cells belong to is a partial result. For example, if this field
19800        * has false, false, true in it, then we know that on the client side, we need to
19801        * make another RPC request since the last result was only a partial.
19802        * </pre>
19803        */
19804       public Builder addPartialFlagPerResult(boolean value) {
19805         ensurePartialFlagPerResultIsMutable();
19806         partialFlagPerResult_.add(value);
19807         onChanged();
19808         return this;
19809       }
19810       /**
19811        * <code>repeated bool partial_flag_per_result = 7;</code>
19812        *
19813        * <pre>
19814        * This field is filled in if we are doing cellblocks. In the event that a row
19815        * could not fit all of its cells into a single RPC chunk, the results will be
19816        * returned as partials, and reconstructed into a complete result on the client
19817        * side. This field is a list of flags indicating whether or not the result
19818        * that the cells belong to is a partial result. For example, if this field
19819        * has false, false, true in it, then we know that on the client side, we need to
19820        * make another RPC request since the last result was only a partial.
19821        * </pre>
19822        */
19823       public Builder addAllPartialFlagPerResult(
19824           java.lang.Iterable<? extends java.lang.Boolean> values) {
19825         ensurePartialFlagPerResultIsMutable();
19826         super.addAll(values, partialFlagPerResult_);
19827         onChanged();
19828         return this;
19829       }
19830       /**
19831        * <code>repeated bool partial_flag_per_result = 7;</code>
19832        *
19833        * <pre>
19834        * This field is filled in if we are doing cellblocks. In the event that a row
19835        * could not fit all of its cells into a single RPC chunk, the results will be
19836        * returned as partials, and reconstructed into a complete result on the client
19837        * side. This field is a list of flags indicating whether or not the result
19838        * that the cells belong to is a partial result. For example, if this field
19839        * has false, false, true in it, then we know that on the client side, we need to
19840        * make another RPC request since the last result was only a partial.
19841        * </pre>
19842        */
19843       public Builder clearPartialFlagPerResult() {
19844         partialFlagPerResult_ = java.util.Collections.emptyList();
19845         bitField0_ = (bitField0_ & ~0x00000040);
19846         onChanged();
19847         return this;
19848       }
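      // Illustrative sketch only, not part of the generated file: per the comment above,
      // a trailing true flag means the last Result is only a partial row, so the client
      // must issue another scan RPC to fetch the rest before handing the row on.
      static boolean exampleNeedsFollowUpRpc(java.util.List<java.lang.Boolean> partialFlags) {
        return !partialFlags.isEmpty() && partialFlags.get(partialFlags.size() - 1);
      }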
19849 
19850       // optional bool more_results_in_region = 8;
19851       private boolean moreResultsInRegion_ ;
19852       /**
19853        * <code>optional bool more_results_in_region = 8;</code>
19854        *
19855        * <pre>
19856        * A server may choose to limit the number of results returned to the client for
19857        * reasons such as the size in bytes or quantity of results accumulated. This field
19858        * will be true when more results exist in the current region.
19859        * </pre>
19860        */
19861       public boolean hasMoreResultsInRegion() {
19862         return ((bitField0_ & 0x00000080) == 0x00000080);
19863       }
19864       /**
19865        * <code>optional bool more_results_in_region = 8;</code>
19866        *
19867        * <pre>
19868        * A server may choose to limit the number of results returned to the client for
19869        * reasons such as the size in bytes or quantity of results accumulated. This field
19870        * will be true when more results exist in the current region.
19871        * </pre>
19872        */
19873       public boolean getMoreResultsInRegion() {
19874         return moreResultsInRegion_;
19875       }
19876       /**
19877        * <code>optional bool more_results_in_region = 8;</code>
19878        *
19879        * <pre>
19880        * A server may choose to limit the number of results returned to the client for
19881        * reasons such as the size in bytes or quantity of results accumulated. This field
19882        * will be true when more results exist in the current region.
19883        * </pre>
19884        */
19885       public Builder setMoreResultsInRegion(boolean value) {
19886         bitField0_ |= 0x00000080;
19887         moreResultsInRegion_ = value;
19888         onChanged();
19889         return this;
19890       }
19891       /**
19892        * <code>optional bool more_results_in_region = 8;</code>
19893        *
19894        * <pre>
19895        * A server may choose to limit the number of results returned to the client for
19896        * will be true when more results exist in the current region.
19897        * will true when more results exist in the current region.
19898        * </pre>
19899        */
19900       public Builder clearMoreResultsInRegion() {
19901         bitField0_ = (bitField0_ & ~0x00000080);
19902         moreResultsInRegion_ = false;
19903         onChanged();
19904         return this;
19905       }
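      // Illustrative sketch only, not part of the generated file: when the server caps a
      // response by size or result count, more_results_in_region tells the client whether
      // to keep calling the same region before advancing to the next one.
      static boolean exampleStayOnCurrentRegion(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse resp) {
        // Treat an unset flag conservatively; the caller can fall back to more_results.
        return resp.hasMoreResultsInRegion() && resp.getMoreResultsInRegion();
      }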
19906 
19907       // optional bool heartbeat_message = 9;
19908       private boolean heartbeatMessage_ ;
19909       /**
19910        * <code>optional bool heartbeat_message = 9;</code>
19911        *
19912        * <pre>
19913        * This field is filled in if the server is sending back a heartbeat message.
19914        * Heartbeat messages are sent back to the client to prevent the scanner from
19915        * timing out. Seeing a heartbeat message communicates to the Client that the
19916        * server would have continued to scan had the time limit not been reached.
19917        * </pre>
19918        */
19919       public boolean hasHeartbeatMessage() {
19920         return ((bitField0_ & 0x00000100) == 0x00000100);
19921       }
19922       /**
19923        * <code>optional bool heartbeat_message = 9;</code>
19924        *
19925        * <pre>
19926        * This field is filled in if the server is sending back a heartbeat message.
19927        * Heartbeat messages are sent back to the client to prevent the scanner from
19928        * timing out. Seeing a heartbeat message communicates to the Client that the
19929        * server would have continued to scan had the time limit not been reached.
19930        * </pre>
19931        */
19932       public boolean getHeartbeatMessage() {
19933         return heartbeatMessage_;
19934       }
19935       /**
19936        * <code>optional bool heartbeat_message = 9;</code>
19937        *
19938        * <pre>
19939        * This field is filled in if the server is sending back a heartbeat message.
19940        * Heartbeat messages are sent back to the client to prevent the scanner from
19941        * timing out. Seeing a heartbeat message communicates to the Client that the
19942        * server would have continued to scan had the time limit not been reached.
19943        * </pre>
19944        */
19945       public Builder setHeartbeatMessage(boolean value) {
19946         bitField0_ |= 0x00000100;
19947         heartbeatMessage_ = value;
19948         onChanged();
19949         return this;
19950       }
19951       /**
19952        * <code>optional bool heartbeat_message = 9;</code>
19953        *
19954        * <pre>
19955        * This field is filled in if the server is sending back a heartbeat message.
19956        * Heartbeat messages are sent back to the client to prevent the scanner from
19957        * timing out. Seeing a heartbeat message communicates to the Client that the
19958        * server would have continued to scan had the time limit not been reached.
19959        * </pre>
19960        */
19961       public Builder clearHeartbeatMessage() {
19962         bitField0_ = (bitField0_ & ~0x00000100);
19963         heartbeatMessage_ = false;
19964         onChanged();
19965         return this;
19966       }
19967 
19968       // optional .hbase.pb.ScanMetrics scan_metrics = 10;
19969       private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
19970       private com.google.protobuf.SingleFieldBuilder<
19971           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> scanMetricsBuilder_;
19972       /**
19973        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
19974        *
19975        * <pre>
19976        * This field is filled in if the client has requested that scan metrics be tracked.
19977        * The metrics tracked here are sent back to the client to be tracked together with 
19978        * the existing client side metrics.
19979        * </pre>
19980        */
19981       public boolean hasScanMetrics() {
19982         return ((bitField0_ & 0x00000200) == 0x00000200);
19983       }
19984       /**
19985        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
19986        *
19987        * <pre>
19988        * This field is filled in if the client has requested that scan metrics be tracked.
19989        * The metrics tracked here are sent back to the client to be tracked together with 
19990        * the existing client side metrics.
19991        * </pre>
19992        */
19993       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() {
19994         if (scanMetricsBuilder_ == null) {
19995           return scanMetrics_;
19996         } else {
19997           return scanMetricsBuilder_.getMessage();
19998         }
19999       }
20000       /**
20001        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20002        *
20003        * <pre>
20004        * This field is filled in if the client has requested that scan metrics be tracked.
20005        * The metrics tracked here are sent back to the client to be tracked together with 
20006        * the existing client side metrics.
20007        * </pre>
20008        */
20009       public Builder setScanMetrics(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics value) {
20010         if (scanMetricsBuilder_ == null) {
20011           if (value == null) {
20012             throw new NullPointerException();
20013           }
20014           scanMetrics_ = value;
20015           onChanged();
20016         } else {
20017           scanMetricsBuilder_.setMessage(value);
20018         }
20019         bitField0_ |= 0x00000200;
20020         return this;
20021       }
20022       /**
20023        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20024        *
20025        * <pre>
20026        * This field is filled in if the client has requested that scan metrics be tracked.
20027        * The metrics tracked here are sent back to the client to be tracked together with 
20028        * the existing client side metrics.
20029        * </pre>
20030        */
20031       public Builder setScanMetrics(
20032           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder builderForValue) {
20033         if (scanMetricsBuilder_ == null) {
20034           scanMetrics_ = builderForValue.build();
20035           onChanged();
20036         } else {
20037           scanMetricsBuilder_.setMessage(builderForValue.build());
20038         }
20039         bitField0_ |= 0x00000200;
20040         return this;
20041       }
20042       /**
20043        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20044        *
20045        * <pre>
20046        * This field is filled in if the client has requested that scan metrics be tracked.
20047        * The metrics tracked here are sent back to the client to be tracked together with 
20048        * the existing client side metrics.
20049        * </pre>
20050        */
20051       public Builder mergeScanMetrics(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics value) {
20052         if (scanMetricsBuilder_ == null) {
20053           if (((bitField0_ & 0x00000200) == 0x00000200) &&
20054               scanMetrics_ != org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) {
20055             scanMetrics_ =
20056               org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder(scanMetrics_).mergeFrom(value).buildPartial();
20057           } else {
20058             scanMetrics_ = value;
20059           }
20060           onChanged();
20061         } else {
20062           scanMetricsBuilder_.mergeFrom(value);
20063         }
20064         bitField0_ |= 0x00000200;
20065         return this;
20066       }
20067       /**
20068        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20069        *
20070        * <pre>
20071        * This field is filled in if the client has requested that scan metrics be tracked.
20072        * The metrics tracked here are sent back to the client to be tracked together with 
20073        * the existing client side metrics.
20074        * </pre>
20075        */
20076       public Builder clearScanMetrics() {
20077         if (scanMetricsBuilder_ == null) {
20078           scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
20079           onChanged();
20080         } else {
20081           scanMetricsBuilder_.clear();
20082         }
20083         bitField0_ = (bitField0_ & ~0x00000200);
20084         return this;
20085       }
20086       /**
20087        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20088        *
20089        * <pre>
20090        * This field is filled in if the client has requested that scan metrics be tracked.
20091        * The metrics tracked here are sent back to the client to be tracked together with 
20092        * the existing client side metrics.
20093        * </pre>
20094        */
20095       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder getScanMetricsBuilder() {
20096         bitField0_ |= 0x00000200;
20097         onChanged();
20098         return getScanMetricsFieldBuilder().getBuilder();
20099       }
20100       /**
20101        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20102        *
20103        * <pre>
20104        * This field is filled in if the client has requested that scan metrics be tracked.
20105        * The metrics tracked here are sent back to the client to be tracked together with 
20106        * the existing client side metrics.
20107        * </pre>
20108        */
20109       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() {
20110         if (scanMetricsBuilder_ != null) {
20111           return scanMetricsBuilder_.getMessageOrBuilder();
20112         } else {
20113           return scanMetrics_;
20114         }
20115       }
20116       /**
20117        * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
20118        *
20119        * <pre>
20120        * This field is filled in if the client has requested that scan metrics be tracked.
20121        * The metrics tracked here are sent back to the client to be tracked together with 
20122        * the existing client side metrics.
20123        * </pre>
20124        */
20125       private com.google.protobuf.SingleFieldBuilder<
20126           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> 
20127           getScanMetricsFieldBuilder() {
20128         if (scanMetricsBuilder_ == null) {
20129           scanMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
20130               org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>(
20131                   scanMetrics_,
20132                   getParentForChildren(),
20133                   isClean());
20134           scanMetrics_ = null;
20135         }
20136         return scanMetricsBuilder_;
20137       }
20138 
20139       // @@protoc_insertion_point(builder_scope:hbase.pb.ScanResponse)
20140     }
20141 
20142     static {
20143       defaultInstance = new ScanResponse(true);
20144       defaultInstance.initFields();
20145     }
20146 
20147     // @@protoc_insertion_point(class_scope:hbase.pb.ScanResponse)
20148   }
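
  // -------------------------------------------------------------------------
  // Editor-added usage sketch (not protoc output): one way a scanning client
  // might interpret the ScanResponse fields documented above. The method name
  // and control flow are illustrative only and are not part of the generated
  // API; only the generated getters themselves are real.
  // -------------------------------------------------------------------------
  private static void exampleHandleScanResponse(ScanResponse response) {
    if (response.hasHeartbeatMessage() && response.getHeartbeatMessage()) {
      // A heartbeat-only response means the server stopped because its time
      // limit was reached, not because the scan is exhausted; the client
      // should issue the next ScanRequest rather than closing the scanner.
      return;
    }
    if (response.getMoreResultsInRegion()) {
      // The server truncated this batch (size or result-count limits), so more
      // rows remain in the current region; keep scanning it before advancing
      // to the next region.
    }
    if (response.hasScanMetrics()) {
      // Present only when the client asked for scan metrics; these server-side
      // counters would be merged into the existing client-side metrics.
      org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics serverMetrics =
          response.getScanMetrics();
    }
  }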
20149 
20150   public interface BulkLoadHFileRequestOrBuilder
20151       extends com.google.protobuf.MessageOrBuilder {
20152 
20153     // required .hbase.pb.RegionSpecifier region = 1;
20154     /**
20155      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
20156      */
20157     boolean hasRegion();
20158     /**
20159      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
20160      */
20161     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
20162     /**
20163      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
20164      */
20165     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
20166 
20167     // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;
20168     /**
20169      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20170      */
20171     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> 
20172         getFamilyPathList();
20173     /**
20174      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20175      */
20176     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
20177     /**
20178      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20179      */
20180     int getFamilyPathCount();
20181     /**
20182      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20183      */
20184     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
20185         getFamilyPathOrBuilderList();
20186     /**
20187      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20188      */
20189     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
20190         int index);
20191 
20192     // optional bool assign_seq_num = 3;
20193     /**
20194      * <code>optional bool assign_seq_num = 3;</code>
20195      */
20196     boolean hasAssignSeqNum();
20197     /**
20198      * <code>optional bool assign_seq_num = 3;</code>
20199      */
20200     boolean getAssignSeqNum();
20201   }
20202   /**
20203    * Protobuf type {@code hbase.pb.BulkLoadHFileRequest}
20204    *
20205    * <pre>
20206    **
20207    * Atomically bulk load multiple HFiles (say from different column families)
20208    * into an open region.
20209    * </pre>
20210    */
20211   public static final class BulkLoadHFileRequest extends
20212       com.google.protobuf.GeneratedMessage
20213       implements BulkLoadHFileRequestOrBuilder {
20214     // Use BulkLoadHFileRequest.newBuilder() to construct.
20215     private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
20216       super(builder);
20217       this.unknownFields = builder.getUnknownFields();
20218     }
20219     private BulkLoadHFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
20220 
20221     private static final BulkLoadHFileRequest defaultInstance;
20222     public static BulkLoadHFileRequest getDefaultInstance() {
20223       return defaultInstance;
20224     }
20225 
20226     public BulkLoadHFileRequest getDefaultInstanceForType() {
20227       return defaultInstance;
20228     }
20229 
20230     private final com.google.protobuf.UnknownFieldSet unknownFields;
20231     @java.lang.Override
20232     public final com.google.protobuf.UnknownFieldSet
20233         getUnknownFields() {
20234       return this.unknownFields;
20235     }
20236     private BulkLoadHFileRequest(
20237         com.google.protobuf.CodedInputStream input,
20238         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20239         throws com.google.protobuf.InvalidProtocolBufferException {
20240       initFields();
20241       int mutable_bitField0_ = 0;
20242       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
20243           com.google.protobuf.UnknownFieldSet.newBuilder();
20244       try {
20245         boolean done = false;
20246         while (!done) {
20247           int tag = input.readTag();
20248           switch (tag) {
20249             case 0:
20250               done = true;
20251               break;
20252             default: {
20253               if (!parseUnknownField(input, unknownFields,
20254                                      extensionRegistry, tag)) {
20255                 done = true;
20256               }
20257               break;
20258             }
20259             case 10: {
20260               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
20261               if (((bitField0_ & 0x00000001) == 0x00000001)) {
20262                 subBuilder = region_.toBuilder();
20263               }
20264               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
20265               if (subBuilder != null) {
20266                 subBuilder.mergeFrom(region_);
20267                 region_ = subBuilder.buildPartial();
20268               }
20269               bitField0_ |= 0x00000001;
20270               break;
20271             }
20272             case 18: {
20273               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
20274                 familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
20275                 mutable_bitField0_ |= 0x00000002;
20276               }
20277               familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
20278               break;
20279             }
20280             case 24: {
20281               bitField0_ |= 0x00000002;
20282               assignSeqNum_ = input.readBool();
20283               break;
20284             }
20285           }
20286         }
20287       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20288         throw e.setUnfinishedMessage(this);
20289       } catch (java.io.IOException e) {
20290         throw new com.google.protobuf.InvalidProtocolBufferException(
20291             e.getMessage()).setUnfinishedMessage(this);
20292       } finally {
20293         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
20294           familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
20295         }
20296         this.unknownFields = unknownFields.build();
20297         makeExtensionsImmutable();
20298       }
20299     }
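    // Editor-added note (illustrative, not protoc output): the case labels in the
    // parsing loop above are protobuf wire tags, i.e. (field_number << 3) | wire_type.
    private static final int EXAMPLE_REGION_TAG      = (1 << 3) | 2; // 10: field 1, length-delimited
    private static final int EXAMPLE_FAMILY_PATH_TAG = (2 << 3) | 2; // 18: field 2, length-delimited
    private static final int EXAMPLE_ASSIGN_SEQ_TAG  = (3 << 3) | 0; // 24: field 3, varint (bool)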
20300     public static final com.google.protobuf.Descriptors.Descriptor
20301         getDescriptor() {
20302       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
20303     }
20304 
20305     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20306         internalGetFieldAccessorTable() {
20307       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable
20308           .ensureFieldAccessorsInitialized(
20309               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
20310     }
20311 
20312     public static com.google.protobuf.Parser<BulkLoadHFileRequest> PARSER =
20313         new com.google.protobuf.AbstractParser<BulkLoadHFileRequest>() {
20314       public BulkLoadHFileRequest parsePartialFrom(
20315           com.google.protobuf.CodedInputStream input,
20316           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20317           throws com.google.protobuf.InvalidProtocolBufferException {
20318         return new BulkLoadHFileRequest(input, extensionRegistry);
20319       }
20320     };
20321 
20322     @java.lang.Override
20323     public com.google.protobuf.Parser<BulkLoadHFileRequest> getParserForType() {
20324       return PARSER;
20325     }
20326 
20327     public interface FamilyPathOrBuilder
20328         extends com.google.protobuf.MessageOrBuilder {
20329 
20330       // required bytes family = 1;
20331       /**
20332        * <code>required bytes family = 1;</code>
20333        */
20334       boolean hasFamily();
20335       /**
20336        * <code>required bytes family = 1;</code>
20337        */
20338       com.google.protobuf.ByteString getFamily();
20339 
20340       // required string path = 2;
20341       /**
20342        * <code>required string path = 2;</code>
20343        */
20344       boolean hasPath();
20345       /**
20346        * <code>required string path = 2;</code>
20347        */
20348       java.lang.String getPath();
20349       /**
20350        * <code>required string path = 2;</code>
20351        */
20352       com.google.protobuf.ByteString
20353           getPathBytes();
20354     }
20355     /**
20356      * Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath}
20357      */
20358     public static final class FamilyPath extends
20359         com.google.protobuf.GeneratedMessage
20360         implements FamilyPathOrBuilder {
20361       // Use FamilyPath.newBuilder() to construct.
20362       private FamilyPath(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
20363         super(builder);
20364         this.unknownFields = builder.getUnknownFields();
20365       }
20366       private FamilyPath(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
20367 
20368       private static final FamilyPath defaultInstance;
20369       public static FamilyPath getDefaultInstance() {
20370         return defaultInstance;
20371       }
20372 
20373       public FamilyPath getDefaultInstanceForType() {
20374         return defaultInstance;
20375       }
20376 
20377       private final com.google.protobuf.UnknownFieldSet unknownFields;
20378       @java.lang.Override
20379       public final com.google.protobuf.UnknownFieldSet
20380           getUnknownFields() {
20381         return this.unknownFields;
20382       }
20383       private FamilyPath(
20384           com.google.protobuf.CodedInputStream input,
20385           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20386           throws com.google.protobuf.InvalidProtocolBufferException {
20387         initFields();
20388         int mutable_bitField0_ = 0;
20389         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
20390             com.google.protobuf.UnknownFieldSet.newBuilder();
20391         try {
20392           boolean done = false;
20393           while (!done) {
20394             int tag = input.readTag();
20395             switch (tag) {
20396               case 0:
20397                 done = true;
20398                 break;
20399               default: {
20400                 if (!parseUnknownField(input, unknownFields,
20401                                        extensionRegistry, tag)) {
20402                   done = true;
20403                 }
20404                 break;
20405               }
20406               case 10: {
20407                 bitField0_ |= 0x00000001;
20408                 family_ = input.readBytes();
20409                 break;
20410               }
20411               case 18: {
20412                 bitField0_ |= 0x00000002;
20413                 path_ = input.readBytes();
20414                 break;
20415               }
20416             }
20417           }
20418         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20419           throw e.setUnfinishedMessage(this);
20420         } catch (java.io.IOException e) {
20421           throw new com.google.protobuf.InvalidProtocolBufferException(
20422               e.getMessage()).setUnfinishedMessage(this);
20423         } finally {
20424           this.unknownFields = unknownFields.build();
20425           makeExtensionsImmutable();
20426         }
20427       }
20428       public static final com.google.protobuf.Descriptors.Descriptor
20429           getDescriptor() {
20430         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
20431       }
20432 
20433       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20434           internalGetFieldAccessorTable() {
20435         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
20436             .ensureFieldAccessorsInitialized(
20437                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
20438       }
20439 
20440       public static com.google.protobuf.Parser<FamilyPath> PARSER =
20441           new com.google.protobuf.AbstractParser<FamilyPath>() {
20442         public FamilyPath parsePartialFrom(
20443             com.google.protobuf.CodedInputStream input,
20444             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20445             throws com.google.protobuf.InvalidProtocolBufferException {
20446           return new FamilyPath(input, extensionRegistry);
20447         }
20448       };
20449 
20450       @java.lang.Override
20451       public com.google.protobuf.Parser<FamilyPath> getParserForType() {
20452         return PARSER;
20453       }
20454 
20455       private int bitField0_;
20456       // required bytes family = 1;
20457       public static final int FAMILY_FIELD_NUMBER = 1;
20458       private com.google.protobuf.ByteString family_;
20459       /**
20460        * <code>required bytes family = 1;</code>
20461        */
20462       public boolean hasFamily() {
20463         return ((bitField0_ & 0x00000001) == 0x00000001);
20464       }
20465       /**
20466        * <code>required bytes family = 1;</code>
20467        */
20468       public com.google.protobuf.ByteString getFamily() {
20469         return family_;
20470       }
20471 
20472       // required string path = 2;
20473       public static final int PATH_FIELD_NUMBER = 2;
20474       private java.lang.Object path_;
20475       /**
20476        * <code>required string path = 2;</code>
20477        */
20478       public boolean hasPath() {
20479         return ((bitField0_ & 0x00000002) == 0x00000002);
20480       }
20481       /**
20482        * <code>required string path = 2;</code>
20483        */
20484       public java.lang.String getPath() {
20485         java.lang.Object ref = path_;
20486         if (ref instanceof java.lang.String) {
20487           return (java.lang.String) ref;
20488         } else {
20489           com.google.protobuf.ByteString bs = 
20490               (com.google.protobuf.ByteString) ref;
20491           java.lang.String s = bs.toStringUtf8();
20492           if (bs.isValidUtf8()) {
20493             path_ = s;
20494           }
20495           return s;
20496         }
20497       }
20498       /**
20499        * <code>required string path = 2;</code>
20500        */
20501       public com.google.protobuf.ByteString
20502           getPathBytes() {
20503         java.lang.Object ref = path_;
20504         if (ref instanceof java.lang.String) {
20505           com.google.protobuf.ByteString b = 
20506               com.google.protobuf.ByteString.copyFromUtf8(
20507                   (java.lang.String) ref);
20508           path_ = b;
20509           return b;
20510         } else {
20511           return (com.google.protobuf.ByteString) ref;
20512         }
20513       }
20514 
20515       private void initFields() {
20516         family_ = com.google.protobuf.ByteString.EMPTY;
20517         path_ = "";
20518       }
20519       private byte memoizedIsInitialized = -1;
20520       public final boolean isInitialized() {
20521         byte isInitialized = memoizedIsInitialized;
20522         if (isInitialized != -1) return isInitialized == 1;
20523 
20524         if (!hasFamily()) {
20525           memoizedIsInitialized = 0;
20526           return false;
20527         }
20528         if (!hasPath()) {
20529           memoizedIsInitialized = 0;
20530           return false;
20531         }
20532         memoizedIsInitialized = 1;
20533         return true;
20534       }
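
      // Editor-added sketch (not protoc output): the required-field check above is
      // what makes Builder.build() fail fast. A Builder missing 'family' or 'path'
      // throws an UninitializedMessageException from build(), while buildPartial()
      // still returns the incomplete message with isInitialized() == false.
      private static boolean exampleIsBuildable(Builder builder) {
        // Mirrors the generated check: both required fields must be set.
        return builder.hasFamily() && builder.hasPath();
      }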
20535 
20536       public void writeTo(com.google.protobuf.CodedOutputStream output)
20537                           throws java.io.IOException {
20538         getSerializedSize();
20539         if (((bitField0_ & 0x00000001) == 0x00000001)) {
20540           output.writeBytes(1, family_);
20541         }
20542         if (((bitField0_ & 0x00000002) == 0x00000002)) {
20543           output.writeBytes(2, getPathBytes());
20544         }
20545         getUnknownFields().writeTo(output);
20546       }
20547 
20548       private int memoizedSerializedSize = -1;
20549       public int getSerializedSize() {
20550         int size = memoizedSerializedSize;
20551         if (size != -1) return size;
20552 
20553         size = 0;
20554         if (((bitField0_ & 0x00000001) == 0x00000001)) {
20555           size += com.google.protobuf.CodedOutputStream
20556             .computeBytesSize(1, family_);
20557         }
20558         if (((bitField0_ & 0x00000002) == 0x00000002)) {
20559           size += com.google.protobuf.CodedOutputStream
20560             .computeBytesSize(2, getPathBytes());
20561         }
20562         size += getUnknownFields().getSerializedSize();
20563         memoizedSerializedSize = size;
20564         return size;
20565       }
20566 
20567       private static final long serialVersionUID = 0L;
20568       @java.lang.Override
20569       protected java.lang.Object writeReplace()
20570           throws java.io.ObjectStreamException {
20571         return super.writeReplace();
20572       }
20573 
20574       @java.lang.Override
20575       public boolean equals(final java.lang.Object obj) {
20576         if (obj == this) {
20577          return true;
20578         }
20579         if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) {
20580           return super.equals(obj);
20581         }
20582         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj;
20583 
20584         boolean result = true;
20585         result = result && (hasFamily() == other.hasFamily());
20586         if (hasFamily()) {
20587           result = result && getFamily()
20588               .equals(other.getFamily());
20589         }
20590         result = result && (hasPath() == other.hasPath());
20591         if (hasPath()) {
20592           result = result && getPath()
20593               .equals(other.getPath());
20594         }
20595         result = result &&
20596             getUnknownFields().equals(other.getUnknownFields());
20597         return result;
20598       }
20599 
20600       private int memoizedHashCode = 0;
20601       @java.lang.Override
20602       public int hashCode() {
20603         if (memoizedHashCode != 0) {
20604           return memoizedHashCode;
20605         }
20606         int hash = 41;
20607         hash = (19 * hash) + getDescriptorForType().hashCode();
20608         if (hasFamily()) {
20609           hash = (37 * hash) + FAMILY_FIELD_NUMBER;
20610           hash = (53 * hash) + getFamily().hashCode();
20611         }
20612         if (hasPath()) {
20613           hash = (37 * hash) + PATH_FIELD_NUMBER;
20614           hash = (53 * hash) + getPath().hashCode();
20615         }
20616         hash = (29 * hash) + getUnknownFields().hashCode();
20617         memoizedHashCode = hash;
20618         return hash;
20619       }
20620 
20621       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
20622           com.google.protobuf.ByteString data)
20623           throws com.google.protobuf.InvalidProtocolBufferException {
20624         return PARSER.parseFrom(data);
20625       }
20626       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
20627           com.google.protobuf.ByteString data,
20628           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20629           throws com.google.protobuf.InvalidProtocolBufferException {
20630         return PARSER.parseFrom(data, extensionRegistry);
20631       }
20632       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data)
20633           throws com.google.protobuf.InvalidProtocolBufferException {
20634         return PARSER.parseFrom(data);
20635       }
20636       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
20637           byte[] data,
20638           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20639           throws com.google.protobuf.InvalidProtocolBufferException {
20640         return PARSER.parseFrom(data, extensionRegistry);
20641       }
20642       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input)
20643           throws java.io.IOException {
20644         return PARSER.parseFrom(input);
20645       }
20646       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
20647           java.io.InputStream input,
20648           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20649           throws java.io.IOException {
20650         return PARSER.parseFrom(input, extensionRegistry);
20651       }
20652       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input)
20653           throws java.io.IOException {
20654         return PARSER.parseDelimitedFrom(input);
20655       }
20656       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(
20657           java.io.InputStream input,
20658           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20659           throws java.io.IOException {
20660         return PARSER.parseDelimitedFrom(input, extensionRegistry);
20661       }
20662       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
20663           com.google.protobuf.CodedInputStream input)
20664           throws java.io.IOException {
20665         return PARSER.parseFrom(input);
20666       }
20667       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
20668           com.google.protobuf.CodedInputStream input,
20669           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20670           throws java.io.IOException {
20671         return PARSER.parseFrom(input, extensionRegistry);
20672       }
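
      // Editor-added round-trip sketch (not protoc output): the parseFrom overloads
      // above accept exactly the bytes a built message produces, so a serialized
      // FamilyPath can be re-parsed losslessly.
      private static FamilyPath exampleRoundTrip(FamilyPath original)
          throws com.google.protobuf.InvalidProtocolBufferException {
        com.google.protobuf.ByteString wire = original.toByteString(); // serialize
        return FamilyPath.parseFrom(wire);                             // delegates to PARSER
      }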
20673 
20674       public static Builder newBuilder() { return Builder.create(); }
20675       public Builder newBuilderForType() { return newBuilder(); }
20676       public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) {
20677         return newBuilder().mergeFrom(prototype);
20678       }
20679       public Builder toBuilder() { return newBuilder(this); }
20680 
20681       @java.lang.Override
20682       protected Builder newBuilderForType(
20683           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20684         Builder builder = new Builder(parent);
20685         return builder;
20686       }
20687       /**
20688        * Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath}
20689        */
20690       public static final class Builder extends
20691           com.google.protobuf.GeneratedMessage.Builder<Builder>
20692          implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder {
20693         public static final com.google.protobuf.Descriptors.Descriptor
20694             getDescriptor() {
20695           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
20696         }
20697 
20698         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20699             internalGetFieldAccessorTable() {
20700           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
20701               .ensureFieldAccessorsInitialized(
20702                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
20703         }
20704 
20705         // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
20706         private Builder() {
20707           maybeForceBuilderInitialization();
20708         }
20709 
20710         private Builder(
20711             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20712           super(parent);
20713           maybeForceBuilderInitialization();
20714         }
20715         private void maybeForceBuilderInitialization() {
20716           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
20717           }
20718         }
20719         private static Builder create() {
20720           return new Builder();
20721         }
20722 
20723         public Builder clear() {
20724           super.clear();
20725           family_ = com.google.protobuf.ByteString.EMPTY;
20726           bitField0_ = (bitField0_ & ~0x00000001);
20727           path_ = "";
20728           bitField0_ = (bitField0_ & ~0x00000002);
20729           return this;
20730         }
20731 
20732         public Builder clone() {
20733           return create().mergeFrom(buildPartial());
20734         }
20735 
20736         public com.google.protobuf.Descriptors.Descriptor
20737             getDescriptorForType() {
20738           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
20739         }
20740 
20741         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() {
20742           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance();
20743         }
20744 
20745         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() {
20746           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial();
20747           if (!result.isInitialized()) {
20748             throw newUninitializedMessageException(result);
20749           }
20750           return result;
20751         }
20752 
20753         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() {
20754           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this);
20755           int from_bitField0_ = bitField0_;
20756           int to_bitField0_ = 0;
20757           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
20758             to_bitField0_ |= 0x00000001;
20759           }
20760           result.family_ = family_;
20761           if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
20762             to_bitField0_ |= 0x00000002;
20763           }
20764           result.path_ = path_;
20765           result.bitField0_ = to_bitField0_;
20766           onBuilt();
20767           return result;
20768         }
20769 
20770         public Builder mergeFrom(com.google.protobuf.Message other) {
20771           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) {
20772             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other);
20773           } else {
20774             super.mergeFrom(other);
20775             return this;
20776           }
20777         }
20778 
20779         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) {
20780           if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this;
20781           if (other.hasFamily()) {
20782             setFamily(other.getFamily());
20783           }
20784           if (other.hasPath()) {
20785             bitField0_ |= 0x00000002;
20786             path_ = other.path_;
20787             onChanged();
20788           }
20789           this.mergeUnknownFields(other.getUnknownFields());
20790           return this;
20791         }
20792 
20793         public final boolean isInitialized() {
20794           if (!hasFamily()) {
20795             
20796             return false;
20797           }
20798           if (!hasPath()) {
20799             
20800             return false;
20801           }
20802           return true;
20803         }
20804 
20805         public Builder mergeFrom(
20806             com.google.protobuf.CodedInputStream input,
20807             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20808             throws java.io.IOException {
20809           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parsedMessage = null;
20810           try {
20811             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
20812           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20813             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage();
20814             throw e;
20815           } finally {
20816             if (parsedMessage != null) {
20817               mergeFrom(parsedMessage);
20818             }
20819           }
20820           return this;
20821         }
20822         private int bitField0_;
20823 
20824         // required bytes family = 1;
20825         private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
20826         /**
20827          * <code>required bytes family = 1;</code>
20828          */
20829         public boolean hasFamily() {
20830           return ((bitField0_ & 0x00000001) == 0x00000001);
20831         }
20832         /**
20833          * <code>required bytes family = 1;</code>
20834          */
20835         public com.google.protobuf.ByteString getFamily() {
20836           return family_;
20837         }
20838         /**
20839          * <code>required bytes family = 1;</code>
20840          */
20841         public Builder setFamily(com.google.protobuf.ByteString value) {
20842           if (value == null) {
20843             throw new NullPointerException();
20844           }
20845           bitField0_ |= 0x00000001;
20846           family_ = value;
20847           onChanged();
20848           return this;
20849         }
20850         /**
20851          * <code>required bytes family = 1;</code>
20852          */
20853         public Builder clearFamily() {
20854           bitField0_ = (bitField0_ & ~0x00000001);
20855           family_ = getDefaultInstance().getFamily();
20856           onChanged();
20857           return this;
20858         }
20859 
20860         // required string path = 2;
20861         private java.lang.Object path_ = "";
20862         /**
20863          * <code>required string path = 2;</code>
20864          */
20865         public boolean hasPath() {
20866           return ((bitField0_ & 0x00000002) == 0x00000002);
20867         }
20868         /**
20869          * <code>required string path = 2;</code>
20870          */
20871         public java.lang.String getPath() {
20872           java.lang.Object ref = path_;
20873           if (!(ref instanceof java.lang.String)) {
20874             java.lang.String s = ((com.google.protobuf.ByteString) ref)
20875                 .toStringUtf8();
20876             path_ = s;
20877             return s;
20878           } else {
20879             return (java.lang.String) ref;
20880           }
20881         }
20882         /**
20883          * <code>required string path = 2;</code>
20884          */
20885         public com.google.protobuf.ByteString
20886             getPathBytes() {
20887           java.lang.Object ref = path_;
20888           if (ref instanceof String) {
20889             com.google.protobuf.ByteString b = 
20890                 com.google.protobuf.ByteString.copyFromUtf8(
20891                     (java.lang.String) ref);
20892             path_ = b;
20893             return b;
20894           } else {
20895             return (com.google.protobuf.ByteString) ref;
20896           }
20897         }
20898         /**
20899          * <code>required string path = 2;</code>
20900          */
20901         public Builder setPath(
20902             java.lang.String value) {
20903           if (value == null) {
20904             throw new NullPointerException();
20905           }
20906           bitField0_ |= 0x00000002;
20907           path_ = value;
20908           onChanged();
20909           return this;
20910         }
20911         /**
20912          * <code>required string path = 2;</code>
20913          */
20914         public Builder clearPath() {
20915           bitField0_ = (bitField0_ & ~0x00000002);
20916           path_ = getDefaultInstance().getPath();
20917           onChanged();
20918           return this;
20919         }
20920         /**
20921          * <code>required string path = 2;</code>
20922          */
20923         public Builder setPathBytes(
20924             com.google.protobuf.ByteString value) {
20925           if (value == null) {
20926             throw new NullPointerException();
20927           }
20928           bitField0_ |= 0x00000002;
20929           path_ = value;
20930           onChanged();
20931           return this;
20932         }
20933 
20934         // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath)
20935       }
20936 
20937       static {
20938         defaultInstance = new FamilyPath(true);
20939         defaultInstance.initFields();
20940       }
20941 
20942       // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath)
20943     }
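
    // Editor-added usage sketch (not protoc output): assembling the bulk-load request
    // described in the class comment above. The method name and parameters are
    // hypothetical, and it assumes the standard generated builder setters for
    // region (field 1), family_path (field 2) and assign_seq_num (field 3).
    private static BulkLoadHFileRequest exampleBuildRequest(
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region,
        com.google.protobuf.ByteString family,
        java.lang.String hfilePath) {
      FamilyPath familyPath = FamilyPath.newBuilder()
          .setFamily(family)           // required bytes family = 1
          .setPath(hfilePath)          // required string path = 2
          .build();
      return BulkLoadHFileRequest.newBuilder()
          .setRegion(region)           // required .hbase.pb.RegionSpecifier region = 1
          .addFamilyPath(familyPath)   // repeated FamilyPath family_path = 2
          .setAssignSeqNum(true)       // optional bool assign_seq_num = 3
          .build();
    }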
20944 
20945     private int bitField0_;
20946     // required .hbase.pb.RegionSpecifier region = 1;
20947     public static final int REGION_FIELD_NUMBER = 1;
20948     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
20949     /**
20950      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
20951      */
20952     public boolean hasRegion() {
20953       return ((bitField0_ & 0x00000001) == 0x00000001);
20954     }
20955     /**
20956      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
20957      */
20958     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
20959       return region_;
20960     }
20961     /**
20962      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
20963      */
20964     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
20965       return region_;
20966     }
20967 
20968     // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;
20969     public static final int FAMILY_PATH_FIELD_NUMBER = 2;
20970     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
20971     /**
20972      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20973      */
20974     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
20975       return familyPath_;
20976     }
20977     /**
20978      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20979      */
20980     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
20981         getFamilyPathOrBuilderList() {
20982       return familyPath_;
20983     }
20984     /**
20985      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20986      */
20987     public int getFamilyPathCount() {
20988       return familyPath_.size();
20989     }
20990     /**
20991      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20992      */
20993     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
20994       return familyPath_.get(index);
20995     }
20996     /**
20997      * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20998      */
20999     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
21000         int index) {
21001       return familyPath_.get(index);
21002     }
21003 
21004     // optional bool assign_seq_num = 3;
21005     public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 3;
21006     private boolean assignSeqNum_;
21007     /**
21008      * <code>optional bool assign_seq_num = 3;</code>
21009      */
21010     public boolean hasAssignSeqNum() {
21011       return ((bitField0_ & 0x00000002) == 0x00000002);
21012     }
21013     /**
21014      * <code>optional bool assign_seq_num = 3;</code>
21015      */
21016     public boolean getAssignSeqNum() {
21017       return assignSeqNum_;
21018     }
21019 
21020     private void initFields() {
21021       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
21022       familyPath_ = java.util.Collections.emptyList();
21023       assignSeqNum_ = false;
21024     }
21025     private byte memoizedIsInitialized = -1;
21026     public final boolean isInitialized() {
21027       byte isInitialized = memoizedIsInitialized;
21028       if (isInitialized != -1) return isInitialized == 1;
21029 
21030       if (!hasRegion()) {
21031         memoizedIsInitialized = 0;
21032         return false;
21033       }
21034       if (!getRegion().isInitialized()) {
21035         memoizedIsInitialized = 0;
21036         return false;
21037       }
21038       for (int i = 0; i < getFamilyPathCount(); i++) {
21039         if (!getFamilyPath(i).isInitialized()) {
21040           memoizedIsInitialized = 0;
21041           return false;
21042         }
21043       }
21044       memoizedIsInitialized = 1;
21045       return true;
21046     }
21047 
21048     public void writeTo(com.google.protobuf.CodedOutputStream output)
21049                         throws java.io.IOException {
21050       getSerializedSize();
21051       if (((bitField0_ & 0x00000001) == 0x00000001)) {
21052         output.writeMessage(1, region_);
21053       }
21054       for (int i = 0; i < familyPath_.size(); i++) {
21055         output.writeMessage(2, familyPath_.get(i));
21056       }
21057       if (((bitField0_ & 0x00000002) == 0x00000002)) {
21058         output.writeBool(3, assignSeqNum_);
21059       }
21060       getUnknownFields().writeTo(output);
21061     }
21062 
21063     private int memoizedSerializedSize = -1;
21064     public int getSerializedSize() {
21065       int size = memoizedSerializedSize;
21066       if (size != -1) return size;
21067 
21068       size = 0;
21069       if (((bitField0_ & 0x00000001) == 0x00000001)) {
21070         size += com.google.protobuf.CodedOutputStream
21071           .computeMessageSize(1, region_);
21072       }
21073       for (int i = 0; i < familyPath_.size(); i++) {
21074         size += com.google.protobuf.CodedOutputStream
21075           .computeMessageSize(2, familyPath_.get(i));
21076       }
21077       if (((bitField0_ & 0x00000002) == 0x00000002)) {
21078         size += com.google.protobuf.CodedOutputStream
21079           .computeBoolSize(3, assignSeqNum_);
21080       }
21081       size += getUnknownFields().getSerializedSize();
21082       memoizedSerializedSize = size;
21083       return size;
21084     }
21085 
21086     private static final long serialVersionUID = 0L;
21087     @java.lang.Override
21088     protected java.lang.Object writeReplace()
21089         throws java.io.ObjectStreamException {
21090       return super.writeReplace();
21091     }
21092 
21093     @java.lang.Override
21094     public boolean equals(final java.lang.Object obj) {
21095       if (obj == this) {
21096        return true;
21097       }
21098       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) {
21099         return super.equals(obj);
21100       }
21101       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj;
21102 
21103       boolean result = true;
21104       result = result && (hasRegion() == other.hasRegion());
21105       if (hasRegion()) {
21106         result = result && getRegion()
21107             .equals(other.getRegion());
21108       }
21109       result = result && getFamilyPathList()
21110           .equals(other.getFamilyPathList());
21111       result = result && (hasAssignSeqNum() == other.hasAssignSeqNum());
21112       if (hasAssignSeqNum()) {
21113         result = result && (getAssignSeqNum()
21114             == other.getAssignSeqNum());
21115       }
21116       result = result &&
21117           getUnknownFields().equals(other.getUnknownFields());
21118       return result;
21119     }
21120 
21121     private int memoizedHashCode = 0;
21122     @java.lang.Override
21123     public int hashCode() {
21124       if (memoizedHashCode != 0) {
21125         return memoizedHashCode;
21126       }
21127       int hash = 41;
21128       hash = (19 * hash) + getDescriptorForType().hashCode();
21129       if (hasRegion()) {
21130         hash = (37 * hash) + REGION_FIELD_NUMBER;
21131         hash = (53 * hash) + getRegion().hashCode();
21132       }
21133       if (getFamilyPathCount() > 0) {
21134         hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER;
21135         hash = (53 * hash) + getFamilyPathList().hashCode();
21136       }
21137       if (hasAssignSeqNum()) {
21138         hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER;
21139         hash = (53 * hash) + hashBoolean(getAssignSeqNum());
21140       }
21141       hash = (29 * hash) + getUnknownFields().hashCode();
21142       memoizedHashCode = hash;
21143       return hash;
21144     }
21145 
21146     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
21147         com.google.protobuf.ByteString data)
21148         throws com.google.protobuf.InvalidProtocolBufferException {
21149       return PARSER.parseFrom(data);
21150     }
21151     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
21152         com.google.protobuf.ByteString data,
21153         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21154         throws com.google.protobuf.InvalidProtocolBufferException {
21155       return PARSER.parseFrom(data, extensionRegistry);
21156     }
21157     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data)
21158         throws com.google.protobuf.InvalidProtocolBufferException {
21159       return PARSER.parseFrom(data);
21160     }
21161     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
21162         byte[] data,
21163         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21164         throws com.google.protobuf.InvalidProtocolBufferException {
21165       return PARSER.parseFrom(data, extensionRegistry);
21166     }
21167     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input)
21168         throws java.io.IOException {
21169       return PARSER.parseFrom(input);
21170     }
21171     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
21172         java.io.InputStream input,
21173         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21174         throws java.io.IOException {
21175       return PARSER.parseFrom(input, extensionRegistry);
21176     }
21177     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input)
21178         throws java.io.IOException {
21179       return PARSER.parseDelimitedFrom(input);
21180     }
21181     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(
21182         java.io.InputStream input,
21183         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21184         throws java.io.IOException {
21185       return PARSER.parseDelimitedFrom(input, extensionRegistry);
21186     }
21187     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
21188         com.google.protobuf.CodedInputStream input)
21189         throws java.io.IOException {
21190       return PARSER.parseFrom(input);
21191     }
21192     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
21193         com.google.protobuf.CodedInputStream input,
21194         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21195         throws java.io.IOException {
21196       return PARSER.parseFrom(input, extensionRegistry);
21197     }
21198 
21199     public static Builder newBuilder() { return Builder.create(); }
21200     public Builder newBuilderForType() { return newBuilder(); }
21201     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) {
21202       return newBuilder().mergeFrom(prototype);
21203     }
21204     public Builder toBuilder() { return newBuilder(this); }
21205 
21206     @java.lang.Override
21207     protected Builder newBuilderForType(
21208         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21209       Builder builder = new Builder(parent);
21210       return builder;
21211     }
21212     /**
21213      * Protobuf type {@code hbase.pb.BulkLoadHFileRequest}
21214      *
21215      * <pre>
21216      **
21217      * Atomically bulk load multiple HFiles (say from different column families)
21218      * into an open region.
21219      * </pre>
21220      */
21221     public static final class Builder extends
21222         com.google.protobuf.GeneratedMessage.Builder<Builder>
21223        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder {
21224       public static final com.google.protobuf.Descriptors.Descriptor
21225           getDescriptor() {
21226         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
21227       }
21228 
21229       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
21230           internalGetFieldAccessorTable() {
21231         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable
21232             .ensureFieldAccessorsInitialized(
21233                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
21234       }
21235 
21236       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder()
21237       private Builder() {
21238         maybeForceBuilderInitialization();
21239       }
21240 
21241       private Builder(
21242           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21243         super(parent);
21244         maybeForceBuilderInitialization();
21245       }
21246       private void maybeForceBuilderInitialization() {
21247         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
21248           getRegionFieldBuilder();
21249           getFamilyPathFieldBuilder();
21250         }
21251       }
21252       private static Builder create() {
21253         return new Builder();
21254       }
21255 
21256       public Builder clear() {
21257         super.clear();
21258         if (regionBuilder_ == null) {
21259           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
21260         } else {
21261           regionBuilder_.clear();
21262         }
21263         bitField0_ = (bitField0_ & ~0x00000001);
21264         if (familyPathBuilder_ == null) {
21265           familyPath_ = java.util.Collections.emptyList();
21266           bitField0_ = (bitField0_ & ~0x00000002);
21267         } else {
21268           familyPathBuilder_.clear();
21269         }
21270         assignSeqNum_ = false;
21271         bitField0_ = (bitField0_ & ~0x00000004);
21272         return this;
21273       }
21274 
21275       public Builder clone() {
21276         return create().mergeFrom(buildPartial());
21277       }
21278 
21279       public com.google.protobuf.Descriptors.Descriptor
21280           getDescriptorForType() {
21281         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
21282       }
21283 
21284       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() {
21285         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
21286       }
21287 
21288       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() {
21289         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial();
21290         if (!result.isInitialized()) {
21291           throw newUninitializedMessageException(result);
21292         }
21293         return result;
21294       }
21295 
21296       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() {
21297         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this);
21298         int from_bitField0_ = bitField0_;
21299         int to_bitField0_ = 0;
21300         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
21301           to_bitField0_ |= 0x00000001;
21302         }
21303         if (regionBuilder_ == null) {
21304           result.region_ = region_;
21305         } else {
21306           result.region_ = regionBuilder_.build();
21307         }
21308         if (familyPathBuilder_ == null) {
21309           if (((bitField0_ & 0x00000002) == 0x00000002)) {
21310             familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
21311             bitField0_ = (bitField0_ & ~0x00000002);
21312           }
21313           result.familyPath_ = familyPath_;
21314         } else {
21315           result.familyPath_ = familyPathBuilder_.build();
21316         }
21317         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
21318           to_bitField0_ |= 0x00000002;
21319         }
21320         result.assignSeqNum_ = assignSeqNum_;
21321         result.bitField0_ = to_bitField0_;
21322         onBuilt();
21323         return result;
21324       }
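            // Note (not generated code): buildPartial() above remaps presence bits because
            // the built message keeps fewer of them than the builder: builder bit 0x1
            // (region) stays 0x1, while builder bit 0x4 (assign_seq_num) becomes message
            // bit 0x2. The repeated family_path field needs a builder-side bit only to
            // track whether its list is still mutable, so it has no message-side bit.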
21325 
21326       public Builder mergeFrom(com.google.protobuf.Message other) {
21327         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) {
21328           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other);
21329         } else {
21330           super.mergeFrom(other);
21331           return this;
21332         }
21333       }
21334 
21335       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) {
21336         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this;
21337         if (other.hasRegion()) {
21338           mergeRegion(other.getRegion());
21339         }
21340         if (familyPathBuilder_ == null) {
21341           if (!other.familyPath_.isEmpty()) {
21342             if (familyPath_.isEmpty()) {
21343               familyPath_ = other.familyPath_;
21344               bitField0_ = (bitField0_ & ~0x00000002);
21345             } else {
21346               ensureFamilyPathIsMutable();
21347               familyPath_.addAll(other.familyPath_);
21348             }
21349             onChanged();
21350           }
21351         } else {
21352           if (!other.familyPath_.isEmpty()) {
21353             if (familyPathBuilder_.isEmpty()) {
21354               familyPathBuilder_.dispose();
21355               familyPathBuilder_ = null;
21356               familyPath_ = other.familyPath_;
21357               bitField0_ = (bitField0_ & ~0x00000002);
21358               familyPathBuilder_ = 
21359                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
21360                    getFamilyPathFieldBuilder() : null;
21361             } else {
21362               familyPathBuilder_.addAllMessages(other.familyPath_);
21363             }
21364           }
21365         }
21366         if (other.hasAssignSeqNum()) {
21367           setAssignSeqNum(other.getAssignSeqNum());
21368         }
21369         this.mergeUnknownFields(other.getUnknownFields());
21370         return this;
21371       }
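            // Note (not generated code): mergeFrom(other) above appends other's family_path
            // entries to this builder's list rather than replacing them, merges the nested
            // region message field by field, and copies assign_seq_num only when it was
            // explicitly set on the other message.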
21372 
21373       public final boolean isInitialized() {
21374         if (!hasRegion()) {
21375           // required field 'region' is not set
21376           return false;
21377         }
21378         if (!getRegion().isInitialized()) {
21379           // nested 'region' message is missing one of its own required fields
21380           return false;
21381         }
21382         for (int i = 0; i < getFamilyPathCount(); i++) {
21383           if (!getFamilyPath(i).isInitialized()) {
21384             // a family_path entry is missing one of its required fields
21385             return false;
21386           }
21387         }
21388         return true;
21389       }
21390 
21391       public Builder mergeFrom(
21392           com.google.protobuf.CodedInputStream input,
21393           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21394           throws java.io.IOException {
21395         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parsedMessage = null;
21396         try {
21397           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21398         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21399           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage();
21400           throw e;
21401         } finally {
21402           if (parsedMessage != null) {
21403             mergeFrom(parsedMessage);
21404           }
21405         }
21406         return this;
21407       }
21408       private int bitField0_;
21409 
21410       // required .hbase.pb.RegionSpecifier region = 1;
21411       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
21412       private com.google.protobuf.SingleFieldBuilder<
21413           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
21414       /**
21415        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21416        */
21417       public boolean hasRegion() {
21418         return ((bitField0_ & 0x00000001) == 0x00000001);
21419       }
21420       /**
21421        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21422        */
21423       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
21424         if (regionBuilder_ == null) {
21425           return region_;
21426         } else {
21427           return regionBuilder_.getMessage();
21428         }
21429       }
21430       /**
21431        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21432        */
21433       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
21434         if (regionBuilder_ == null) {
21435           if (value == null) {
21436             throw new NullPointerException();
21437           }
21438           region_ = value;
21439           onChanged();
21440         } else {
21441           regionBuilder_.setMessage(value);
21442         }
21443         bitField0_ |= 0x00000001;
21444         return this;
21445       }
21446       /**
21447        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21448        */
21449       public Builder setRegion(
21450           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
21451         if (regionBuilder_ == null) {
21452           region_ = builderForValue.build();
21453           onChanged();
21454         } else {
21455           regionBuilder_.setMessage(builderForValue.build());
21456         }
21457         bitField0_ |= 0x00000001;
21458         return this;
21459       }
21460       /**
21461        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21462        */
21463       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
21464         if (regionBuilder_ == null) {
21465           if (((bitField0_ & 0x00000001) == 0x00000001) &&
21466               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
21467             region_ =
21468               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
21469           } else {
21470             region_ = value;
21471           }
21472           onChanged();
21473         } else {
21474           regionBuilder_.mergeFrom(value);
21475         }
21476         bitField0_ |= 0x00000001;
21477         return this;
21478       }
21479       /**
21480        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21481        */
21482       public Builder clearRegion() {
21483         if (regionBuilder_ == null) {
21484           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
21485           onChanged();
21486         } else {
21487           regionBuilder_.clear();
21488         }
21489         bitField0_ = (bitField0_ & ~0x00000001);
21490         return this;
21491       }
21492       /**
21493        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21494        */
21495       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
21496         bitField0_ |= 0x00000001;
21497         onChanged();
21498         return getRegionFieldBuilder().getBuilder();
21499       }
21500       /**
21501        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21502        */
21503       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
21504         if (regionBuilder_ != null) {
21505           return regionBuilder_.getMessageOrBuilder();
21506         } else {
21507           return region_;
21508         }
21509       }
21510       /**
21511        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
21512        */
21513       private com.google.protobuf.SingleFieldBuilder<
21514           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
21515           getRegionFieldBuilder() {
21516         if (regionBuilder_ == null) {
21517           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
21518               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
21519                   region_,
21520                   getParentForChildren(),
21521                   isClean());
21522           region_ = null;
21523         }
21524         return regionBuilder_;
21525       }
21526 
21527       // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;
21528       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
21529         java.util.Collections.emptyList();
21530       private void ensureFamilyPathIsMutable() {
21531         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
21532           familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_);
21533           bitField0_ |= 0x00000002;
21534         }
21535       }
21536 
21537       private com.google.protobuf.RepeatedFieldBuilder<
21538           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;
21539 
21540       /**
21541        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21542        */
21543       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
21544         if (familyPathBuilder_ == null) {
21545           return java.util.Collections.unmodifiableList(familyPath_);
21546         } else {
21547           return familyPathBuilder_.getMessageList();
21548         }
21549       }
21550       /**
21551        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21552        */
21553       public int getFamilyPathCount() {
21554         if (familyPathBuilder_ == null) {
21555           return familyPath_.size();
21556         } else {
21557           return familyPathBuilder_.getCount();
21558         }
21559       }
21560       /**
21561        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21562        */
21563       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
21564         if (familyPathBuilder_ == null) {
21565           return familyPath_.get(index);
21566         } else {
21567           return familyPathBuilder_.getMessage(index);
21568         }
21569       }
21570       /**
21571        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21572        */
21573       public Builder setFamilyPath(
21574           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
21575         if (familyPathBuilder_ == null) {
21576           if (value == null) {
21577             throw new NullPointerException();
21578           }
21579           ensureFamilyPathIsMutable();
21580           familyPath_.set(index, value);
21581           onChanged();
21582         } else {
21583           familyPathBuilder_.setMessage(index, value);
21584         }
21585         return this;
21586       }
21587       /**
21588        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21589        */
21590       public Builder setFamilyPath(
21591           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
21592         if (familyPathBuilder_ == null) {
21593           ensureFamilyPathIsMutable();
21594           familyPath_.set(index, builderForValue.build());
21595           onChanged();
21596         } else {
21597           familyPathBuilder_.setMessage(index, builderForValue.build());
21598         }
21599         return this;
21600       }
21601       /**
21602        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21603        */
21604       public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
21605         if (familyPathBuilder_ == null) {
21606           if (value == null) {
21607             throw new NullPointerException();
21608           }
21609           ensureFamilyPathIsMutable();
21610           familyPath_.add(value);
21611           onChanged();
21612         } else {
21613           familyPathBuilder_.addMessage(value);
21614         }
21615         return this;
21616       }
21617       /**
21618        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21619        */
21620       public Builder addFamilyPath(
21621           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
21622         if (familyPathBuilder_ == null) {
21623           if (value == null) {
21624             throw new NullPointerException();
21625           }
21626           ensureFamilyPathIsMutable();
21627           familyPath_.add(index, value);
21628           onChanged();
21629         } else {
21630           familyPathBuilder_.addMessage(index, value);
21631         }
21632         return this;
21633       }
21634       /**
21635        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21636        */
21637       public Builder addFamilyPath(
21638           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
21639         if (familyPathBuilder_ == null) {
21640           ensureFamilyPathIsMutable();
21641           familyPath_.add(builderForValue.build());
21642           onChanged();
21643         } else {
21644           familyPathBuilder_.addMessage(builderForValue.build());
21645         }
21646         return this;
21647       }
21648       /**
21649        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21650        */
21651       public Builder addFamilyPath(
21652           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
21653         if (familyPathBuilder_ == null) {
21654           ensureFamilyPathIsMutable();
21655           familyPath_.add(index, builderForValue.build());
21656           onChanged();
21657         } else {
21658           familyPathBuilder_.addMessage(index, builderForValue.build());
21659         }
21660         return this;
21661       }
21662       /**
21663        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21664        */
21665       public Builder addAllFamilyPath(
21666           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
21667         if (familyPathBuilder_ == null) {
21668           ensureFamilyPathIsMutable();
21669           super.addAll(values, familyPath_);
21670           onChanged();
21671         } else {
21672           familyPathBuilder_.addAllMessages(values);
21673         }
21674         return this;
21675       }
21676       /**
21677        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21678        */
21679       public Builder clearFamilyPath() {
21680         if (familyPathBuilder_ == null) {
21681           familyPath_ = java.util.Collections.emptyList();
21682           bitField0_ = (bitField0_ & ~0x00000002);
21683           onChanged();
21684         } else {
21685           familyPathBuilder_.clear();
21686         }
21687         return this;
21688       }
21689       /**
21690        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21691        */
21692       public Builder removeFamilyPath(int index) {
21693         if (familyPathBuilder_ == null) {
21694           ensureFamilyPathIsMutable();
21695           familyPath_.remove(index);
21696           onChanged();
21697         } else {
21698           familyPathBuilder_.remove(index);
21699         }
21700         return this;
21701       }
21702       /**
21703        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21704        */
21705       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
21706           int index) {
21707         return getFamilyPathFieldBuilder().getBuilder(index);
21708       }
21709       /**
21710        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21711        */
21712       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
21713           int index) {
21714         if (familyPathBuilder_ == null) {
21715           return familyPath_.get(index);  } else {
21716           return familyPathBuilder_.getMessageOrBuilder(index);
21717         }
21718       }
21719       /**
21720        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21721        */
21722       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
21723            getFamilyPathOrBuilderList() {
21724         if (familyPathBuilder_ != null) {
21725           return familyPathBuilder_.getMessageOrBuilderList();
21726         } else {
21727           return java.util.Collections.unmodifiableList(familyPath_);
21728         }
21729       }
21730       /**
21731        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21732        */
21733       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
21734         return getFamilyPathFieldBuilder().addBuilder(
21735             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
21736       }
21737       /**
21738        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21739        */
21740       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
21741           int index) {
21742         return getFamilyPathFieldBuilder().addBuilder(
21743             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
21744       }
21745       /**
21746        * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21747        */
21748       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder> 
21749            getFamilyPathBuilderList() {
21750         return getFamilyPathFieldBuilder().getBuilderList();
21751       }
21752       private com.google.protobuf.RepeatedFieldBuilder<
21753           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
21754           getFamilyPathFieldBuilder() {
21755         if (familyPathBuilder_ == null) {
21756           familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
21757               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>(
21758                   familyPath_,
21759                   ((bitField0_ & 0x00000002) == 0x00000002),
21760                   getParentForChildren(),
21761                   isClean());
21762           familyPath_ = null;
21763         }
21764         return familyPathBuilder_;
21765       }
21766 
21767       // optional bool assign_seq_num = 3;
21768       private boolean assignSeqNum_ ;
21769       /**
21770        * <code>optional bool assign_seq_num = 3;</code>
21771        */
21772       public boolean hasAssignSeqNum() {
21773         return ((bitField0_ & 0x00000004) == 0x00000004);
21774       }
21775       /**
21776        * <code>optional bool assign_seq_num = 3;</code>
21777        */
21778       public boolean getAssignSeqNum() {
21779         return assignSeqNum_;
21780       }
21781       /**
21782        * <code>optional bool assign_seq_num = 3;</code>
21783        */
21784       public Builder setAssignSeqNum(boolean value) {
21785         bitField0_ |= 0x00000004;
21786         assignSeqNum_ = value;
21787         onChanged();
21788         return this;
21789       }
21790       /**
21791        * <code>optional bool assign_seq_num = 3;</code>
21792        */
21793       public Builder clearAssignSeqNum() {
21794         bitField0_ = (bitField0_ & ~0x00000004);
21795         assignSeqNum_ = false;
21796         onChanged();
21797         return this;
21798       }
21799 
21800       // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest)
21801     }
21802 
21803     static {
21804       defaultInstance = new BulkLoadHFileRequest(true);
21805       defaultInstance.initFields();
21806     }
21807 
21808     // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest)
21809   }
21810 
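        // Illustrative usage (not generated code): a minimal sketch of how a caller might
        // assemble a BulkLoadHFileRequest with the builder API above and round-trip it
        // through the wire format. The FamilyPath setters (setFamily/setPath) and the
        // RegionSpecifier builder come from generated code outside this excerpt, so their
        // exact names are assumptions; the region name and file path are hypothetical.
        //
        //   BulkLoadHFileRequest request = BulkLoadHFileRequest.newBuilder()
        //       .setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
        //           .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)    // assumed enum constant
        //           .setValue(com.google.protobuf.ByteString.copyFromUtf8("t1,,1234.abcd.")) // hypothetical region name
        //           .build())
        //       .addFamilyPath(BulkLoadHFileRequest.FamilyPath.newBuilder()
        //           .setFamily("cf")                  // assumed setter: column family to load into
        //           .setPath("/staging/cf/hfile-0"))  // assumed setter: hypothetical HFile location
        //       .setAssignSeqNum(true)                // let the server assign the sequence id
        //       .build();                             // throws if the required 'region' field is unset
        //
        //   BulkLoadHFileRequest copy = BulkLoadHFileRequest.parseFrom(request.toByteString());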
21811   public interface BulkLoadHFileResponseOrBuilder
21812       extends com.google.protobuf.MessageOrBuilder {
21813 
21814     // required bool loaded = 1;
21815     /**
21816      * <code>required bool loaded = 1;</code>
21817      */
21818     boolean hasLoaded();
21819     /**
21820      * <code>required bool loaded = 1;</code>
21821      */
21822     boolean getLoaded();
21823   }
21824   /**
21825    * Protobuf type {@code hbase.pb.BulkLoadHFileResponse}
21826    */
21827   public static final class BulkLoadHFileResponse extends
21828       com.google.protobuf.GeneratedMessage
21829       implements BulkLoadHFileResponseOrBuilder {
21830     // Use BulkLoadHFileResponse.newBuilder() to construct.
21831     private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
21832       super(builder);
21833       this.unknownFields = builder.getUnknownFields();
21834     }
21835     private BulkLoadHFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
21836 
21837     private static final BulkLoadHFileResponse defaultInstance;
21838     public static BulkLoadHFileResponse getDefaultInstance() {
21839       return defaultInstance;
21840     }
21841 
21842     public BulkLoadHFileResponse getDefaultInstanceForType() {
21843       return defaultInstance;
21844     }
21845 
21846     private final com.google.protobuf.UnknownFieldSet unknownFields;
21847     @java.lang.Override
21848     public final com.google.protobuf.UnknownFieldSet
21849         getUnknownFields() {
21850       return this.unknownFields;
21851     }
21852     private BulkLoadHFileResponse(
21853         com.google.protobuf.CodedInputStream input,
21854         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21855         throws com.google.protobuf.InvalidProtocolBufferException {
21856       initFields();
21857       int mutable_bitField0_ = 0;
21858       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
21859           com.google.protobuf.UnknownFieldSet.newBuilder();
21860       try {
21861         boolean done = false;
21862         while (!done) {
21863           int tag = input.readTag();
21864           switch (tag) {
21865             case 0:
21866               done = true;
21867               break;
21868             default: {
21869               if (!parseUnknownField(input, unknownFields,
21870                                      extensionRegistry, tag)) {
21871                 done = true;
21872               }
21873               break;
21874             }
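                  // Tag 8 = (field number 1 << 3) | wire type 0 (varint): the required 'loaded' flag.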
21875             case 8: {
21876               bitField0_ |= 0x00000001;
21877               loaded_ = input.readBool();
21878               break;
21879             }
21880           }
21881         }
21882       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21883         throw e.setUnfinishedMessage(this);
21884       } catch (java.io.IOException e) {
21885         throw new com.google.protobuf.InvalidProtocolBufferException(
21886             e.getMessage()).setUnfinishedMessage(this);
21887       } finally {
21888         this.unknownFields = unknownFields.build();
21889         makeExtensionsImmutable();
21890       }
21891     }
21892     public static final com.google.protobuf.Descriptors.Descriptor
21893         getDescriptor() {
21894       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
21895     }
21896 
21897     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
21898         internalGetFieldAccessorTable() {
21899       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable
21900           .ensureFieldAccessorsInitialized(
21901               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
21902     }
21903 
21904     public static com.google.protobuf.Parser<BulkLoadHFileResponse> PARSER =
21905         new com.google.protobuf.AbstractParser<BulkLoadHFileResponse>() {
21906       public BulkLoadHFileResponse parsePartialFrom(
21907           com.google.protobuf.CodedInputStream input,
21908           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21909           throws com.google.protobuf.InvalidProtocolBufferException {
21910         return new BulkLoadHFileResponse(input, extensionRegistry);
21911       }
21912     };
21913 
21914     @java.lang.Override
21915     public com.google.protobuf.Parser<BulkLoadHFileResponse> getParserForType() {
21916       return PARSER;
21917     }
21918 
21919     private int bitField0_;
21920     // required bool loaded = 1;
21921     public static final int LOADED_FIELD_NUMBER = 1;
21922     private boolean loaded_;
21923     /**
21924      * <code>required bool loaded = 1;</code>
21925      */
21926     public boolean hasLoaded() {
21927       return ((bitField0_ & 0x00000001) == 0x00000001);
21928     }
21929     /**
21930      * <code>required bool loaded = 1;</code>
21931      */
21932     public boolean getLoaded() {
21933       return loaded_;
21934     }
21935 
21936     private void initFields() {
21937       loaded_ = false;
21938     }
21939     private byte memoizedIsInitialized = -1;
21940     public final boolean isInitialized() {
21941       byte isInitialized = memoizedIsInitialized;
21942       if (isInitialized != -1) return isInitialized == 1;
21943 
21944       if (!hasLoaded()) {
21945         memoizedIsInitialized = 0;
21946         return false;
21947       }
21948       memoizedIsInitialized = 1;
21949       return true;
21950     }
21951 
21952     public void writeTo(com.google.protobuf.CodedOutputStream output)
21953                         throws java.io.IOException {
21954       getSerializedSize();
21955       if (((bitField0_ & 0x00000001) == 0x00000001)) {
21956         output.writeBool(1, loaded_);
21957       }
21958       getUnknownFields().writeTo(output);
21959     }
21960 
21961     private int memoizedSerializedSize = -1;
21962     public int getSerializedSize() {
21963       int size = memoizedSerializedSize;
21964       if (size != -1) return size;
21965 
21966       size = 0;
21967       if (((bitField0_ & 0x00000001) == 0x00000001)) {
21968         size += com.google.protobuf.CodedOutputStream
21969           .computeBoolSize(1, loaded_);
21970       }
21971       size += getUnknownFields().getSerializedSize();
21972       memoizedSerializedSize = size;
21973       return size;
21974     }
21975 
21976     private static final long serialVersionUID = 0L;
21977     @java.lang.Override
21978     protected java.lang.Object writeReplace()
21979         throws java.io.ObjectStreamException {
21980       return super.writeReplace();
21981     }
21982 
21983     @java.lang.Override
21984     public boolean equals(final java.lang.Object obj) {
21985       if (obj == this) {
21986         return true;
21987       }
21988       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) {
21989         return super.equals(obj);
21990       }
21991       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj;
21992 
21993       boolean result = true;
21994       result = result && (hasLoaded() == other.hasLoaded());
21995       if (hasLoaded()) {
21996         result = result && (getLoaded()
21997             == other.getLoaded());
21998       }
21999       result = result &&
22000           getUnknownFields().equals(other.getUnknownFields());
22001       return result;
22002     }
22003 
22004     private int memoizedHashCode = 0;
22005     @java.lang.Override
22006     public int hashCode() {
22007       if (memoizedHashCode != 0) {
22008         return memoizedHashCode;
22009       }
22010       int hash = 41;
22011       hash = (19 * hash) + getDescriptorForType().hashCode();
22012       if (hasLoaded()) {
22013         hash = (37 * hash) + LOADED_FIELD_NUMBER;
22014         hash = (53 * hash) + hashBoolean(getLoaded());
22015       }
22016       hash = (29 * hash) + getUnknownFields().hashCode();
22017       memoizedHashCode = hash;
22018       return hash;
22019     }
22020 
22021     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
22022         com.google.protobuf.ByteString data)
22023         throws com.google.protobuf.InvalidProtocolBufferException {
22024       return PARSER.parseFrom(data);
22025     }
22026     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
22027         com.google.protobuf.ByteString data,
22028         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22029         throws com.google.protobuf.InvalidProtocolBufferException {
22030       return PARSER.parseFrom(data, extensionRegistry);
22031     }
22032     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data)
22033         throws com.google.protobuf.InvalidProtocolBufferException {
22034       return PARSER.parseFrom(data);
22035     }
22036     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
22037         byte[] data,
22038         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22039         throws com.google.protobuf.InvalidProtocolBufferException {
22040       return PARSER.parseFrom(data, extensionRegistry);
22041     }
22042     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input)
22043         throws java.io.IOException {
22044       return PARSER.parseFrom(input);
22045     }
22046     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
22047         java.io.InputStream input,
22048         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22049         throws java.io.IOException {
22050       return PARSER.parseFrom(input, extensionRegistry);
22051     }
22052     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input)
22053         throws java.io.IOException {
22054       return PARSER.parseDelimitedFrom(input);
22055     }
22056     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(
22057         java.io.InputStream input,
22058         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22059         throws java.io.IOException {
22060       return PARSER.parseDelimitedFrom(input, extensionRegistry);
22061     }
22062     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
22063         com.google.protobuf.CodedInputStream input)
22064         throws java.io.IOException {
22065       return PARSER.parseFrom(input);
22066     }
22067     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
22068         com.google.protobuf.CodedInputStream input,
22069         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22070         throws java.io.IOException {
22071       return PARSER.parseFrom(input, extensionRegistry);
22072     }
22073 
22074     public static Builder newBuilder() { return Builder.create(); }
22075     public Builder newBuilderForType() { return newBuilder(); }
22076     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) {
22077       return newBuilder().mergeFrom(prototype);
22078     }
22079     public Builder toBuilder() { return newBuilder(this); }
22080 
22081     @java.lang.Override
22082     protected Builder newBuilderForType(
22083         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22084       Builder builder = new Builder(parent);
22085       return builder;
22086     }
22087     /**
22088      * Protobuf type {@code hbase.pb.BulkLoadHFileResponse}
22089      */
22090     public static final class Builder extends
22091         com.google.protobuf.GeneratedMessage.Builder<Builder>
22092        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder {
22093       public static final com.google.protobuf.Descriptors.Descriptor
22094           getDescriptor() {
22095         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
22096       }
22097 
22098       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
22099           internalGetFieldAccessorTable() {
22100         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable
22101             .ensureFieldAccessorsInitialized(
22102                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
22103       }
22104 
22105       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder()
22106       private Builder() {
22107         maybeForceBuilderInitialization();
22108       }
22109 
22110       private Builder(
22111           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22112         super(parent);
22113         maybeForceBuilderInitialization();
22114       }
22115       private void maybeForceBuilderInitialization() {
22116         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
22117         }
22118       }
22119       private static Builder create() {
22120         return new Builder();
22121       }
22122 
22123       public Builder clear() {
22124         super.clear();
22125         loaded_ = false;
22126         bitField0_ = (bitField0_ & ~0x00000001);
22127         return this;
22128       }
22129 
22130       public Builder clone() {
22131         return create().mergeFrom(buildPartial());
22132       }
22133 
22134       public com.google.protobuf.Descriptors.Descriptor
22135           getDescriptorForType() {
22136         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
22137       }
22138 
22139       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() {
22140         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
22141       }
22142 
22143       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() {
22144         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial();
22145         if (!result.isInitialized()) {
22146           throw newUninitializedMessageException(result);
22147         }
22148         return result;
22149       }
22150 
22151       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() {
22152         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this);
22153         int from_bitField0_ = bitField0_;
22154         int to_bitField0_ = 0;
22155         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
22156           to_bitField0_ |= 0x00000001;
22157         }
22158         result.loaded_ = loaded_;
22159         result.bitField0_ = to_bitField0_;
22160         onBuilt();
22161         return result;
22162       }
22163 
22164       public Builder mergeFrom(com.google.protobuf.Message other) {
22165         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) {
22166           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other);
22167         } else {
22168           super.mergeFrom(other);
22169           return this;
22170         }
22171       }
22172 
22173       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) {
22174         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this;
22175         if (other.hasLoaded()) {
22176           setLoaded(other.getLoaded());
22177         }
22178         this.mergeUnknownFields(other.getUnknownFields());
22179         return this;
22180       }
22181 
22182       public final boolean isInitialized() {
22183         if (!hasLoaded()) {
22184           // required field 'loaded' is not set
22185           return false;
22186         }
22187         return true;
22188       }
22189 
22190       public Builder mergeFrom(
22191           com.google.protobuf.CodedInputStream input,
22192           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22193           throws java.io.IOException {
22194         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null;
22195         try {
22196           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
22197         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22198           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage();
22199           throw e;
22200         } finally {
22201           if (parsedMessage != null) {
22202             mergeFrom(parsedMessage);
22203           }
22204         }
22205         return this;
22206       }
22207       private int bitField0_;
22208 
22209       // required bool loaded = 1;
22210       private boolean loaded_ ;
22211       /**
22212        * <code>required bool loaded = 1;</code>
22213        */
22214       public boolean hasLoaded() {
22215         return ((bitField0_ & 0x00000001) == 0x00000001);
22216       }
22217       /**
22218        * <code>required bool loaded = 1;</code>
22219        */
22220       public boolean getLoaded() {
22221         return loaded_;
22222       }
22223       /**
22224        * <code>required bool loaded = 1;</code>
22225        */
22226       public Builder setLoaded(boolean value) {
22227         bitField0_ |= 0x00000001;
22228         loaded_ = value;
22229         onChanged();
22230         return this;
22231       }
22232       /**
22233        * <code>required bool loaded = 1;</code>
22234        */
22235       public Builder clearLoaded() {
22236         bitField0_ = (bitField0_ & ~0x00000001);
22237         loaded_ = false;
22238         onChanged();
22239         return this;
22240       }
22241 
22242       // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse)
22243     }
22244 
22245     static {
22246       defaultInstance = new BulkLoadHFileResponse(true);
22247       defaultInstance.initFields();
22248     }
22249 
22250     // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse)
22251   }
22252 
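        // Illustrative usage (not generated code): a minimal sketch of reading the single
        // required field of a BulkLoadHFileResponse after parsing it from raw bytes; only
        // methods shown above are used, and 'responseBytes' is a hypothetical payload
        // returned by the region server.
        //
        //   BulkLoadHFileResponse response = BulkLoadHFileResponse.parseFrom(responseBytes);
        //   if (response.hasLoaded() && response.getLoaded()) {
        //     // every HFile named in the request was moved into the region atomically
        //   } else {
        //     // the bulk load did not complete; the caller should retry or surface an error
        //   }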
22253   public interface CoprocessorServiceCallOrBuilder
22254       extends com.google.protobuf.MessageOrBuilder {
22255 
22256     // required bytes row = 1;
22257     /**
22258      * <code>required bytes row = 1;</code>
22259      */
22260     boolean hasRow();
22261     /**
22262      * <code>required bytes row = 1;</code>
22263      */
22264     com.google.protobuf.ByteString getRow();
22265 
22266     // required string service_name = 2;
22267     /**
22268      * <code>required string service_name = 2;</code>
22269      */
22270     boolean hasServiceName();
22271     /**
22272      * <code>required string service_name = 2;</code>
22273      */
22274     java.lang.String getServiceName();
22275     /**
22276      * <code>required string service_name = 2;</code>
22277      */
22278     com.google.protobuf.ByteString
22279         getServiceNameBytes();
22280 
22281     // required string method_name = 3;
22282     /**
22283      * <code>required string method_name = 3;</code>
22284      */
22285     boolean hasMethodName();
22286     /**
22287      * <code>required string method_name = 3;</code>
22288      */
22289     java.lang.String getMethodName();
22290     /**
22291      * <code>required string method_name = 3;</code>
22292      */
22293     com.google.protobuf.ByteString
22294         getMethodNameBytes();
22295 
22296     // required bytes request = 4;
22297     /**
22298      * <code>required bytes request = 4;</code>
22299      */
22300     boolean hasRequest();
22301     /**
22302      * <code>required bytes request = 4;</code>
22303      */
22304     com.google.protobuf.ByteString getRequest();
22305   }
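        // Illustrative usage (not generated code): a minimal sketch of populating the four
        // required fields declared by CoprocessorServiceCallOrBuilder. The builder setters
        // (setRow, setServiceName, setMethodName, setRequest) are assumed from the generated
        // Builder, which lies outside this excerpt; the service and method names below are
        // hypothetical.
        //
        //   CoprocessorServiceCall call = CoprocessorServiceCall.newBuilder()
        //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-0"))  // row that routes to the target region
        //       .setServiceName("ExampleService")                              // hypothetical coprocessor service
        //       .setMethodName("exampleMethod")                                // hypothetical method on that service
        //       .setRequest(com.google.protobuf.ByteString.EMPTY)              // serialized request for that method
        //       .build();                                                      // throws if any required field is unset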
22306   /**
22307    * Protobuf type {@code hbase.pb.CoprocessorServiceCall}
22308    */
22309   public static final class CoprocessorServiceCall extends
22310       com.google.protobuf.GeneratedMessage
22311       implements CoprocessorServiceCallOrBuilder {
22312     // Use CoprocessorServiceCall.newBuilder() to construct.
22313     private CoprocessorServiceCall(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
22314       super(builder);
22315       this.unknownFields = builder.getUnknownFields();
22316     }
22317     private CoprocessorServiceCall(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
22318 
22319     private static final CoprocessorServiceCall defaultInstance;
22320     public static CoprocessorServiceCall getDefaultInstance() {
22321       return defaultInstance;
22322     }
22323 
22324     public CoprocessorServiceCall getDefaultInstanceForType() {
22325       return defaultInstance;
22326     }
22327 
22328     private final com.google.protobuf.UnknownFieldSet unknownFields;
22329     @java.lang.Override
22330     public final com.google.protobuf.UnknownFieldSet
22331         getUnknownFields() {
22332       return this.unknownFields;
22333     }
22334     private CoprocessorServiceCall(
22335         com.google.protobuf.CodedInputStream input,
22336         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22337         throws com.google.protobuf.InvalidProtocolBufferException {
22338       initFields();
22339       int mutable_bitField0_ = 0;
22340       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
22341           com.google.protobuf.UnknownFieldSet.newBuilder();
22342       try {
22343         boolean done = false;
22344         while (!done) {
22345           int tag = input.readTag();
22346           switch (tag) {
22347             case 0:
22348               done = true;
22349               break;
22350             default: {
22351               if (!parseUnknownField(input, unknownFields,
22352                                      extensionRegistry, tag)) {
22353                 done = true;
22354               }
22355               break;
22356             }
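                  // Field tags encode (field number << 3) | wire type; the length-delimited
                  // (wire type 2) fields of this message therefore arrive as:
                  //   10 = row (1), 18 = service_name (2), 26 = method_name (3), 34 = request (4).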
22357             case 10: {
22358               bitField0_ |= 0x00000001;
22359               row_ = input.readBytes();
22360               break;
22361             }
22362             case 18: {
22363               bitField0_ |= 0x00000002;
22364               serviceName_ = input.readBytes();
22365               break;
22366             }
22367             case 26: {
22368               bitField0_ |= 0x00000004;
22369               methodName_ = input.readBytes();
22370               break;
22371             }
22372             case 34: {
22373               bitField0_ |= 0x00000008;
22374               request_ = input.readBytes();
22375               break;
22376             }
22377           }
22378         }
22379       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22380         throw e.setUnfinishedMessage(this);
22381       } catch (java.io.IOException e) {
22382         throw new com.google.protobuf.InvalidProtocolBufferException(
22383             e.getMessage()).setUnfinishedMessage(this);
22384       } finally {
22385         this.unknownFields = unknownFields.build();
22386         makeExtensionsImmutable();
22387       }
22388     }
22389     public static final com.google.protobuf.Descriptors.Descriptor
22390         getDescriptor() {
22391       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
22392     }
22393 
22394     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
22395         internalGetFieldAccessorTable() {
22396       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable
22397           .ensureFieldAccessorsInitialized(
22398               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
22399     }
22400 
22401     public static com.google.protobuf.Parser<CoprocessorServiceCall> PARSER =
22402         new com.google.protobuf.AbstractParser<CoprocessorServiceCall>() {
22403       public CoprocessorServiceCall parsePartialFrom(
22404           com.google.protobuf.CodedInputStream input,
22405           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22406           throws com.google.protobuf.InvalidProtocolBufferException {
22407         return new CoprocessorServiceCall(input, extensionRegistry);
22408       }
22409     };
22410 
22411     @java.lang.Override
22412     public com.google.protobuf.Parser<CoprocessorServiceCall> getParserForType() {
22413       return PARSER;
22414     }
22415 
22416     private int bitField0_;
22417     // required bytes row = 1;
22418     public static final int ROW_FIELD_NUMBER = 1;
22419     private com.google.protobuf.ByteString row_;
22420     /**
22421      * <code>required bytes row = 1;</code>
22422      */
22423     public boolean hasRow() {
22424       return ((bitField0_ & 0x00000001) == 0x00000001);
22425     }
22426     /**
22427      * <code>required bytes row = 1;</code>
22428      */
22429     public com.google.protobuf.ByteString getRow() {
22430       return row_;
22431     }
22432 
22433     // required string service_name = 2;
22434     public static final int SERVICE_NAME_FIELD_NUMBER = 2;
22435     private java.lang.Object serviceName_;
22436     /**
22437      * <code>required string service_name = 2;</code>
22438      */
22439     public boolean hasServiceName() {
22440       return ((bitField0_ & 0x00000002) == 0x00000002);
22441     }
22442     /**
22443      * <code>required string service_name = 2;</code>
22444      */
22445     public java.lang.String getServiceName() {
22446       java.lang.Object ref = serviceName_;
22447       if (ref instanceof java.lang.String) {
22448         return (java.lang.String) ref;
22449       } else {
22450         com.google.protobuf.ByteString bs = 
22451             (com.google.protobuf.ByteString) ref;
22452         java.lang.String s = bs.toStringUtf8();
22453         if (bs.isValidUtf8()) {
22454           serviceName_ = s;
22455         }
22456         return s;
22457       }
22458     }
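          // Note (not generated code): serviceName_ holds either a String or a ByteString.
          // getServiceName() above lazily replaces the ByteString with the decoded String
          // once it is known to be valid UTF-8, and getServiceNameBytes() below caches the
          // reverse conversion, so repeated calls avoid re-encoding. The methodName_ field
          // further down follows the same pattern.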
22459     /**
22460      * <code>required string service_name = 2;</code>
22461      */
22462     public com.google.protobuf.ByteString
22463         getServiceNameBytes() {
22464       java.lang.Object ref = serviceName_;
22465       if (ref instanceof java.lang.String) {
22466         com.google.protobuf.ByteString b = 
22467             com.google.protobuf.ByteString.copyFromUtf8(
22468                 (java.lang.String) ref);
22469         serviceName_ = b;
22470         return b;
22471       } else {
22472         return (com.google.protobuf.ByteString) ref;
22473       }
22474     }
22475 
22476     // required string method_name = 3;
22477     public static final int METHOD_NAME_FIELD_NUMBER = 3;
22478     private java.lang.Object methodName_;
22479     /**
22480      * <code>required string method_name = 3;</code>
22481      */
22482     public boolean hasMethodName() {
22483       return ((bitField0_ & 0x00000004) == 0x00000004);
22484     }
22485     /**
22486      * <code>required string method_name = 3;</code>
22487      */
22488     public java.lang.String getMethodName() {
22489       java.lang.Object ref = methodName_;
22490       if (ref instanceof java.lang.String) {
22491         return (java.lang.String) ref;
22492       } else {
22493         com.google.protobuf.ByteString bs = 
22494             (com.google.protobuf.ByteString) ref;
22495         java.lang.String s = bs.toStringUtf8();
22496         if (bs.isValidUtf8()) {
22497           methodName_ = s;
22498         }
22499         return s;
22500       }
22501     }
22502     /**
22503      * <code>required string method_name = 3;</code>
22504      */
22505     public com.google.protobuf.ByteString
22506         getMethodNameBytes() {
22507       java.lang.Object ref = methodName_;
22508       if (ref instanceof java.lang.String) {
22509         com.google.protobuf.ByteString b = 
22510             com.google.protobuf.ByteString.copyFromUtf8(
22511                 (java.lang.String) ref);
22512         methodName_ = b;
22513         return b;
22514       } else {
22515         return (com.google.protobuf.ByteString) ref;
22516       }
22517     }
22518 
22519     // required bytes request = 4;
22520     public static final int REQUEST_FIELD_NUMBER = 4;
22521     private com.google.protobuf.ByteString request_;
22522     /**
22523      * <code>required bytes request = 4;</code>
22524      */
22525     public boolean hasRequest() {
22526       return ((bitField0_ & 0x00000008) == 0x00000008);
22527     }
22528     /**
22529      * <code>required bytes request = 4;</code>
22530      */
22531     public com.google.protobuf.ByteString getRequest() {
22532       return request_;
22533     }
22534 
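    // Editor's note (not generated code): the four required fields above share
    // the single bitField0_ presence word; each hasXxx() accessor tests one mask:
    //   hasRow()         -> bitField0_ & 0x00000001
    //   hasServiceName() -> bitField0_ & 0x00000002
    //   hasMethodName()  -> bitField0_ & 0x00000004
    //   hasRequest()     -> bitField0_ & 0x00000008
    // isInitialized() below reports false unless all four bits are set, and
    // writeTo()/getSerializedSize() only emit fields whose presence bit is set.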
22535     private void initFields() {
22536       row_ = com.google.protobuf.ByteString.EMPTY;
22537       serviceName_ = "";
22538       methodName_ = "";
22539       request_ = com.google.protobuf.ByteString.EMPTY;
22540     }
22541     private byte memoizedIsInitialized = -1;
22542     public final boolean isInitialized() {
22543       byte isInitialized = memoizedIsInitialized;
22544       if (isInitialized != -1) return isInitialized == 1;
22545 
22546       if (!hasRow()) {
22547         memoizedIsInitialized = 0;
22548         return false;
22549       }
22550       if (!hasServiceName()) {
22551         memoizedIsInitialized = 0;
22552         return false;
22553       }
22554       if (!hasMethodName()) {
22555         memoizedIsInitialized = 0;
22556         return false;
22557       }
22558       if (!hasRequest()) {
22559         memoizedIsInitialized = 0;
22560         return false;
22561       }
22562       memoizedIsInitialized = 1;
22563       return true;
22564     }
22565 
22566     public void writeTo(com.google.protobuf.CodedOutputStream output)
22567                         throws java.io.IOException {
22568       getSerializedSize();
22569       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22570         output.writeBytes(1, row_);
22571       }
22572       if (((bitField0_ & 0x00000002) == 0x00000002)) {
22573         output.writeBytes(2, getServiceNameBytes());
22574       }
22575       if (((bitField0_ & 0x00000004) == 0x00000004)) {
22576         output.writeBytes(3, getMethodNameBytes());
22577       }
22578       if (((bitField0_ & 0x00000008) == 0x00000008)) {
22579         output.writeBytes(4, request_);
22580       }
22581       getUnknownFields().writeTo(output);
22582     }
22583 
22584     private int memoizedSerializedSize = -1;
22585     public int getSerializedSize() {
22586       int size = memoizedSerializedSize;
22587       if (size != -1) return size;
22588 
22589       size = 0;
22590       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22591         size += com.google.protobuf.CodedOutputStream
22592           .computeBytesSize(1, row_);
22593       }
22594       if (((bitField0_ & 0x00000002) == 0x00000002)) {
22595         size += com.google.protobuf.CodedOutputStream
22596           .computeBytesSize(2, getServiceNameBytes());
22597       }
22598       if (((bitField0_ & 0x00000004) == 0x00000004)) {
22599         size += com.google.protobuf.CodedOutputStream
22600           .computeBytesSize(3, getMethodNameBytes());
22601       }
22602       if (((bitField0_ & 0x00000008) == 0x00000008)) {
22603         size += com.google.protobuf.CodedOutputStream
22604           .computeBytesSize(4, request_);
22605       }
22606       size += getUnknownFields().getSerializedSize();
22607       memoizedSerializedSize = size;
22608       return size;
22609     }
22610 
22611     private static final long serialVersionUID = 0L;
22612     @java.lang.Override
22613     protected java.lang.Object writeReplace()
22614         throws java.io.ObjectStreamException {
22615       return super.writeReplace();
22616     }
22617 
22618     @java.lang.Override
22619     public boolean equals(final java.lang.Object obj) {
22620       if (obj == this) {
22621         return true;
22622       }
22623       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)) {
22624         return super.equals(obj);
22625       }
22626       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj;
22627 
22628       boolean result = true;
22629       result = result && (hasRow() == other.hasRow());
22630       if (hasRow()) {
22631         result = result && getRow()
22632             .equals(other.getRow());
22633       }
22634       result = result && (hasServiceName() == other.hasServiceName());
22635       if (hasServiceName()) {
22636         result = result && getServiceName()
22637             .equals(other.getServiceName());
22638       }
22639       result = result && (hasMethodName() == other.hasMethodName());
22640       if (hasMethodName()) {
22641         result = result && getMethodName()
22642             .equals(other.getMethodName());
22643       }
22644       result = result && (hasRequest() == other.hasRequest());
22645       if (hasRequest()) {
22646         result = result && getRequest()
22647             .equals(other.getRequest());
22648       }
22649       result = result &&
22650           getUnknownFields().equals(other.getUnknownFields());
22651       return result;
22652     }
22653 
22654     private int memoizedHashCode = 0;
22655     @java.lang.Override
22656     public int hashCode() {
22657       if (memoizedHashCode != 0) {
22658         return memoizedHashCode;
22659       }
22660       int hash = 41;
22661       hash = (19 * hash) + getDescriptorForType().hashCode();
22662       if (hasRow()) {
22663         hash = (37 * hash) + ROW_FIELD_NUMBER;
22664         hash = (53 * hash) + getRow().hashCode();
22665       }
22666       if (hasServiceName()) {
22667         hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
22668         hash = (53 * hash) + getServiceName().hashCode();
22669       }
22670       if (hasMethodName()) {
22671         hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER;
22672         hash = (53 * hash) + getMethodName().hashCode();
22673       }
22674       if (hasRequest()) {
22675         hash = (37 * hash) + REQUEST_FIELD_NUMBER;
22676         hash = (53 * hash) + getRequest().hashCode();
22677       }
22678       hash = (29 * hash) + getUnknownFields().hashCode();
22679       memoizedHashCode = hash;
22680       return hash;
22681     }
22682 
22683     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
22684         com.google.protobuf.ByteString data)
22685         throws com.google.protobuf.InvalidProtocolBufferException {
22686       return PARSER.parseFrom(data);
22687     }
22688     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
22689         com.google.protobuf.ByteString data,
22690         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22691         throws com.google.protobuf.InvalidProtocolBufferException {
22692       return PARSER.parseFrom(data, extensionRegistry);
22693     }
22694     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data)
22695         throws com.google.protobuf.InvalidProtocolBufferException {
22696       return PARSER.parseFrom(data);
22697     }
22698     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
22699         byte[] data,
22700         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22701         throws com.google.protobuf.InvalidProtocolBufferException {
22702       return PARSER.parseFrom(data, extensionRegistry);
22703     }
22704     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input)
22705         throws java.io.IOException {
22706       return PARSER.parseFrom(input);
22707     }
22708     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
22709         java.io.InputStream input,
22710         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22711         throws java.io.IOException {
22712       return PARSER.parseFrom(input, extensionRegistry);
22713     }
22714     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input)
22715         throws java.io.IOException {
22716       return PARSER.parseDelimitedFrom(input);
22717     }
22718     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(
22719         java.io.InputStream input,
22720         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22721         throws java.io.IOException {
22722       return PARSER.parseDelimitedFrom(input, extensionRegistry);
22723     }
22724     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
22725         com.google.protobuf.CodedInputStream input)
22726         throws java.io.IOException {
22727       return PARSER.parseFrom(input);
22728     }
22729     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
22730         com.google.protobuf.CodedInputStream input,
22731         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22732         throws java.io.IOException {
22733       return PARSER.parseFrom(input, extensionRegistry);
22734     }
22735 
22736     public static Builder newBuilder() { return Builder.create(); }
22737     public Builder newBuilderForType() { return newBuilder(); }
22738     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) {
22739       return newBuilder().mergeFrom(prototype);
22740     }
22741     public Builder toBuilder() { return newBuilder(this); }
22742 
22743     @java.lang.Override
22744     protected Builder newBuilderForType(
22745         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22746       Builder builder = new Builder(parent);
22747       return builder;
22748     }
22749     /**
22750      * Protobuf type {@code hbase.pb.CoprocessorServiceCall}
22751      */
22752     public static final class Builder extends
22753         com.google.protobuf.GeneratedMessage.Builder<Builder>
22754        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder {
22755       public static final com.google.protobuf.Descriptors.Descriptor
22756           getDescriptor() {
22757         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
22758       }
22759 
22760       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
22761           internalGetFieldAccessorTable() {
22762         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable
22763             .ensureFieldAccessorsInitialized(
22764                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
22765       }
22766 
22767       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder()
22768       private Builder() {
22769         maybeForceBuilderInitialization();
22770       }
22771 
22772       private Builder(
22773           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22774         super(parent);
22775         maybeForceBuilderInitialization();
22776       }
22777       private void maybeForceBuilderInitialization() {
22778         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
22779         }
22780       }
22781       private static Builder create() {
22782         return new Builder();
22783       }
22784 
22785       public Builder clear() {
22786         super.clear();
22787         row_ = com.google.protobuf.ByteString.EMPTY;
22788         bitField0_ = (bitField0_ & ~0x00000001);
22789         serviceName_ = "";
22790         bitField0_ = (bitField0_ & ~0x00000002);
22791         methodName_ = "";
22792         bitField0_ = (bitField0_ & ~0x00000004);
22793         request_ = com.google.protobuf.ByteString.EMPTY;
22794         bitField0_ = (bitField0_ & ~0x00000008);
22795         return this;
22796       }
22797 
22798       public Builder clone() {
22799         return create().mergeFrom(buildPartial());
22800       }
22801 
22802       public com.google.protobuf.Descriptors.Descriptor
22803           getDescriptorForType() {
22804         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
22805       }
22806 
22807       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() {
22808         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
22809       }
22810 
22811       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() {
22812         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial();
22813         if (!result.isInitialized()) {
22814           throw newUninitializedMessageException(result);
22815         }
22816         return result;
22817       }
22818 
22819       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() {
22820         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this);
22821         int from_bitField0_ = bitField0_;
22822         int to_bitField0_ = 0;
22823         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
22824           to_bitField0_ |= 0x00000001;
22825         }
22826         result.row_ = row_;
22827         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
22828           to_bitField0_ |= 0x00000002;
22829         }
22830         result.serviceName_ = serviceName_;
22831         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
22832           to_bitField0_ |= 0x00000004;
22833         }
22834         result.methodName_ = methodName_;
22835         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
22836           to_bitField0_ |= 0x00000008;
22837         }
22838         result.request_ = request_;
22839         result.bitField0_ = to_bitField0_;
22840         onBuilt();
22841         return result;
22842       }
22843 
22844       public Builder mergeFrom(com.google.protobuf.Message other) {
22845         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) {
22846           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other);
22847         } else {
22848           super.mergeFrom(other);
22849           return this;
22850         }
22851       }
22852 
22853       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) {
22854         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this;
22855         if (other.hasRow()) {
22856           setRow(other.getRow());
22857         }
22858         if (other.hasServiceName()) {
22859           bitField0_ |= 0x00000002;
22860           serviceName_ = other.serviceName_;
22861           onChanged();
22862         }
22863         if (other.hasMethodName()) {
22864           bitField0_ |= 0x00000004;
22865           methodName_ = other.methodName_;
22866           onChanged();
22867         }
22868         if (other.hasRequest()) {
22869           setRequest(other.getRequest());
22870         }
22871         this.mergeUnknownFields(other.getUnknownFields());
22872         return this;
22873       }
22874 
22875       public final boolean isInitialized() {
22876         if (!hasRow()) {
22877           
22878           return false;
22879         }
22880         if (!hasServiceName()) {
22881           
22882           return false;
22883         }
22884         if (!hasMethodName()) {
22885           
22886           return false;
22887         }
22888         if (!hasRequest()) {
22889           
22890           return false;
22891         }
22892         return true;
22893       }
22894 
22895       public Builder mergeFrom(
22896           com.google.protobuf.CodedInputStream input,
22897           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22898           throws java.io.IOException {
22899         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parsedMessage = null;
22900         try {
22901           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
22902         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22903           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage();
22904           throw e;
22905         } finally {
22906           if (parsedMessage != null) {
22907             mergeFrom(parsedMessage);
22908           }
22909         }
22910         return this;
22911       }
22912       private int bitField0_;
22913 
22914       // required bytes row = 1;
22915       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
22916       /**
22917        * <code>required bytes row = 1;</code>
22918        */
22919       public boolean hasRow() {
22920         return ((bitField0_ & 0x00000001) == 0x00000001);
22921       }
22922       /**
22923        * <code>required bytes row = 1;</code>
22924        */
22925       public com.google.protobuf.ByteString getRow() {
22926         return row_;
22927       }
22928       /**
22929        * <code>required bytes row = 1;</code>
22930        */
22931       public Builder setRow(com.google.protobuf.ByteString value) {
22932         if (value == null) {
22933           throw new NullPointerException();
22934         }
22935         bitField0_ |= 0x00000001;
22936         row_ = value;
22937         onChanged();
22938         return this;
22939       }
22940       /**
22941        * <code>required bytes row = 1;</code>
22942        */
22943       public Builder clearRow() {
22944         bitField0_ = (bitField0_ & ~0x00000001);
22945         row_ = getDefaultInstance().getRow();
22946         onChanged();
22947         return this;
22948       }
22949 
22950       // required string service_name = 2;
22951       private java.lang.Object serviceName_ = "";
22952       /**
22953        * <code>required string service_name = 2;</code>
22954        */
22955       public boolean hasServiceName() {
22956         return ((bitField0_ & 0x00000002) == 0x00000002);
22957       }
22958       /**
22959        * <code>required string service_name = 2;</code>
22960        */
22961       public java.lang.String getServiceName() {
22962         java.lang.Object ref = serviceName_;
22963         if (!(ref instanceof java.lang.String)) {
22964           java.lang.String s = ((com.google.protobuf.ByteString) ref)
22965               .toStringUtf8();
22966           serviceName_ = s;
22967           return s;
22968         } else {
22969           return (java.lang.String) ref;
22970         }
22971       }
22972       /**
22973        * <code>required string service_name = 2;</code>
22974        */
22975       public com.google.protobuf.ByteString
22976           getServiceNameBytes() {
22977         java.lang.Object ref = serviceName_;
22978         if (ref instanceof String) {
22979           com.google.protobuf.ByteString b = 
22980               com.google.protobuf.ByteString.copyFromUtf8(
22981                   (java.lang.String) ref);
22982           serviceName_ = b;
22983           return b;
22984         } else {
22985           return (com.google.protobuf.ByteString) ref;
22986         }
22987       }
22988       /**
22989        * <code>required string service_name = 2;</code>
22990        */
22991       public Builder setServiceName(
22992           java.lang.String value) {
22993         if (value == null) {
22994           throw new NullPointerException();
22995         }
22996         bitField0_ |= 0x00000002;
22997         serviceName_ = value;
22998         onChanged();
22999         return this;
23000       }
23001       /**
23002        * <code>required string service_name = 2;</code>
23003        */
23004       public Builder clearServiceName() {
23005         bitField0_ = (bitField0_ & ~0x00000002);
23006         serviceName_ = getDefaultInstance().getServiceName();
23007         onChanged();
23008         return this;
23009       }
23010       /**
23011        * <code>required string service_name = 2;</code>
23012        */
23013       public Builder setServiceNameBytes(
23014           com.google.protobuf.ByteString value) {
23015         if (value == null) {
23016           throw new NullPointerException();
23017         }
23018         bitField0_ |= 0x00000002;
23019         serviceName_ = value;
23020         onChanged();
23021         return this;
23022       }
23023 
23024       // required string method_name = 3;
23025       private java.lang.Object methodName_ = "";
23026       /**
23027        * <code>required string method_name = 3;</code>
23028        */
23029       public boolean hasMethodName() {
23030         return ((bitField0_ & 0x00000004) == 0x00000004);
23031       }
23032       /**
23033        * <code>required string method_name = 3;</code>
23034        */
23035       public java.lang.String getMethodName() {
23036         java.lang.Object ref = methodName_;
23037         if (!(ref instanceof java.lang.String)) {
23038           java.lang.String s = ((com.google.protobuf.ByteString) ref)
23039               .toStringUtf8();
23040           methodName_ = s;
23041           return s;
23042         } else {
23043           return (java.lang.String) ref;
23044         }
23045       }
23046       /**
23047        * <code>required string method_name = 3;</code>
23048        */
23049       public com.google.protobuf.ByteString
23050           getMethodNameBytes() {
23051         java.lang.Object ref = methodName_;
23052         if (ref instanceof String) {
23053           com.google.protobuf.ByteString b = 
23054               com.google.protobuf.ByteString.copyFromUtf8(
23055                   (java.lang.String) ref);
23056           methodName_ = b;
23057           return b;
23058         } else {
23059           return (com.google.protobuf.ByteString) ref;
23060         }
23061       }
23062       /**
23063        * <code>required string method_name = 3;</code>
23064        */
23065       public Builder setMethodName(
23066           java.lang.String value) {
23067         if (value == null) {
23068           throw new NullPointerException();
23069         }
23070         bitField0_ |= 0x00000004;
23071         methodName_ = value;
23072         onChanged();
23073         return this;
23074       }
23075       /**
23076        * <code>required string method_name = 3;</code>
23077        */
23078       public Builder clearMethodName() {
23079         bitField0_ = (bitField0_ & ~0x00000004);
23080         methodName_ = getDefaultInstance().getMethodName();
23081         onChanged();
23082         return this;
23083       }
23084       /**
23085        * <code>required string method_name = 3;</code>
23086        */
23087       public Builder setMethodNameBytes(
23088           com.google.protobuf.ByteString value) {
23089         if (value == null) {
23090           throw new NullPointerException();
23091         }
23092         bitField0_ |= 0x00000004;
23093         methodName_ = value;
23094         onChanged();
23095         return this;
23096       }
23097 
23098       // required bytes request = 4;
23099       private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
23100       /**
23101        * <code>required bytes request = 4;</code>
23102        */
23103       public boolean hasRequest() {
23104         return ((bitField0_ & 0x00000008) == 0x00000008);
23105       }
23106       /**
23107        * <code>required bytes request = 4;</code>
23108        */
23109       public com.google.protobuf.ByteString getRequest() {
23110         return request_;
23111       }
23112       /**
23113        * <code>required bytes request = 4;</code>
23114        */
23115       public Builder setRequest(com.google.protobuf.ByteString value) {
23116         if (value == null) {
23117           throw new NullPointerException();
23118         }
23119         bitField0_ |= 0x00000008;
23120         request_ = value;
23121         onChanged();
23122         return this;
23123       }
23124       /**
23125        * <code>required bytes request = 4;</code>
23126        */
23127       public Builder clearRequest() {
23128         bitField0_ = (bitField0_ & ~0x00000008);
23129         request_ = getDefaultInstance().getRequest();
23130         onChanged();
23131         return this;
23132       }
23133 
23134       // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceCall)
23135     }
23136 
23137     static {
23138       defaultInstance = new CoprocessorServiceCall(true);
23139       defaultInstance.initFields();
23140     }
23141 
23142     // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceCall)
23143   }
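  /*
   * Editor's note (not generated code): a minimal usage sketch for the generated
   * CoprocessorServiceCall Builder defined above. The row key, service name,
   * method name, and request payload are hypothetical placeholders; in practice
   * the request bytes carry a serialized protobuf message for the target endpoint.
   *
   *   ClientProtos.CoprocessorServiceCall call =
   *       ClientProtos.CoprocessorServiceCall.newBuilder()
   *           .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-key"))
   *           .setServiceName("ExampleService")
   *           .setMethodName("exampleMethod")
   *           .setRequest(com.google.protobuf.ByteString.EMPTY)
   *           .build();   // build() throws if any required field is missing
   */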
23144 
23145   public interface CoprocessorServiceResultOrBuilder
23146       extends com.google.protobuf.MessageOrBuilder {
23147 
23148     // optional .hbase.pb.NameBytesPair value = 1;
23149     /**
23150      * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23151      */
23152     boolean hasValue();
23153     /**
23154      * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23155      */
23156     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
23157     /**
23158      * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23159      */
23160     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
23161   }
23162   /**
23163    * Protobuf type {@code hbase.pb.CoprocessorServiceResult}
23164    */
23165   public static final class CoprocessorServiceResult extends
23166       com.google.protobuf.GeneratedMessage
23167       implements CoprocessorServiceResultOrBuilder {
23168     // Use CoprocessorServiceResult.newBuilder() to construct.
23169     private CoprocessorServiceResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
23170       super(builder);
23171       this.unknownFields = builder.getUnknownFields();
23172     }
23173     private CoprocessorServiceResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
23174 
23175     private static final CoprocessorServiceResult defaultInstance;
23176     public static CoprocessorServiceResult getDefaultInstance() {
23177       return defaultInstance;
23178     }
23179 
23180     public CoprocessorServiceResult getDefaultInstanceForType() {
23181       return defaultInstance;
23182     }
23183 
23184     private final com.google.protobuf.UnknownFieldSet unknownFields;
23185     @java.lang.Override
23186     public final com.google.protobuf.UnknownFieldSet
23187         getUnknownFields() {
23188       return this.unknownFields;
23189     }
23190     private CoprocessorServiceResult(
23191         com.google.protobuf.CodedInputStream input,
23192         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23193         throws com.google.protobuf.InvalidProtocolBufferException {
23194       initFields();
23195       int mutable_bitField0_ = 0;
23196       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
23197           com.google.protobuf.UnknownFieldSet.newBuilder();
23198       try {
23199         boolean done = false;
23200         while (!done) {
23201           int tag = input.readTag();
23202           switch (tag) {
23203             case 0:
23204               done = true;
23205               break;
23206             default: {
23207               if (!parseUnknownField(input, unknownFields,
23208                                      extensionRegistry, tag)) {
23209                 done = true;
23210               }
23211               break;
23212             }
23213             case 10: {
23214               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
23215               if (((bitField0_ & 0x00000001) == 0x00000001)) {
23216                 subBuilder = value_.toBuilder();
23217               }
23218               value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
23219               if (subBuilder != null) {
23220                 subBuilder.mergeFrom(value_);
23221                 value_ = subBuilder.buildPartial();
23222               }
23223               bitField0_ |= 0x00000001;
23224               break;
23225             }
23226           }
23227         }
23228       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23229         throw e.setUnfinishedMessage(this);
23230       } catch (java.io.IOException e) {
23231         throw new com.google.protobuf.InvalidProtocolBufferException(
23232             e.getMessage()).setUnfinishedMessage(this);
23233       } finally {
23234         this.unknownFields = unknownFields.build();
23235         makeExtensionsImmutable();
23236       }
23237     }
23238     public static final com.google.protobuf.Descriptors.Descriptor
23239         getDescriptor() {
23240       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
23241     }
23242 
23243     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
23244         internalGetFieldAccessorTable() {
23245       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable
23246           .ensureFieldAccessorsInitialized(
23247               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
23248     }
23249 
23250     public static com.google.protobuf.Parser<CoprocessorServiceResult> PARSER =
23251         new com.google.protobuf.AbstractParser<CoprocessorServiceResult>() {
23252       public CoprocessorServiceResult parsePartialFrom(
23253           com.google.protobuf.CodedInputStream input,
23254           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23255           throws com.google.protobuf.InvalidProtocolBufferException {
23256         return new CoprocessorServiceResult(input, extensionRegistry);
23257       }
23258     };
23259 
23260     @java.lang.Override
23261     public com.google.protobuf.Parser<CoprocessorServiceResult> getParserForType() {
23262       return PARSER;
23263     }
23264 
23265     private int bitField0_;
23266     // optional .hbase.pb.NameBytesPair value = 1;
23267     public static final int VALUE_FIELD_NUMBER = 1;
23268     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
23269     /**
23270      * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23271      */
23272     public boolean hasValue() {
23273       return ((bitField0_ & 0x00000001) == 0x00000001);
23274     }
23275     /**
23276      * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23277      */
23278     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
23279       return value_;
23280     }
23281     /**
23282      * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23283      */
23284     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
23285       return value_;
23286     }
23287 
23288     private void initFields() {
23289       value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
23290     }
23291     private byte memoizedIsInitialized = -1;
23292     public final boolean isInitialized() {
23293       byte isInitialized = memoizedIsInitialized;
23294       if (isInitialized != -1) return isInitialized == 1;
23295 
23296       if (hasValue()) {
23297         if (!getValue().isInitialized()) {
23298           memoizedIsInitialized = 0;
23299           return false;
23300         }
23301       }
23302       memoizedIsInitialized = 1;
23303       return true;
23304     }
23305 
23306     public void writeTo(com.google.protobuf.CodedOutputStream output)
23307                         throws java.io.IOException {
23308       getSerializedSize();
23309       if (((bitField0_ & 0x00000001) == 0x00000001)) {
23310         output.writeMessage(1, value_);
23311       }
23312       getUnknownFields().writeTo(output);
23313     }
23314 
23315     private int memoizedSerializedSize = -1;
23316     public int getSerializedSize() {
23317       int size = memoizedSerializedSize;
23318       if (size != -1) return size;
23319 
23320       size = 0;
23321       if (((bitField0_ & 0x00000001) == 0x00000001)) {
23322         size += com.google.protobuf.CodedOutputStream
23323           .computeMessageSize(1, value_);
23324       }
23325       size += getUnknownFields().getSerializedSize();
23326       memoizedSerializedSize = size;
23327       return size;
23328     }
23329 
23330     private static final long serialVersionUID = 0L;
23331     @java.lang.Override
23332     protected java.lang.Object writeReplace()
23333         throws java.io.ObjectStreamException {
23334       return super.writeReplace();
23335     }
23336 
23337     @java.lang.Override
23338     public boolean equals(final java.lang.Object obj) {
23339       if (obj == this) {
23340         return true;
23341       }
23342       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)) {
23343         return super.equals(obj);
23344       }
23345       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) obj;
23346 
23347       boolean result = true;
23348       result = result && (hasValue() == other.hasValue());
23349       if (hasValue()) {
23350         result = result && getValue()
23351             .equals(other.getValue());
23352       }
23353       result = result &&
23354           getUnknownFields().equals(other.getUnknownFields());
23355       return result;
23356     }
23357 
23358     private int memoizedHashCode = 0;
23359     @java.lang.Override
23360     public int hashCode() {
23361       if (memoizedHashCode != 0) {
23362         return memoizedHashCode;
23363       }
23364       int hash = 41;
23365       hash = (19 * hash) + getDescriptorForType().hashCode();
23366       if (hasValue()) {
23367         hash = (37 * hash) + VALUE_FIELD_NUMBER;
23368         hash = (53 * hash) + getValue().hashCode();
23369       }
23370       hash = (29 * hash) + getUnknownFields().hashCode();
23371       memoizedHashCode = hash;
23372       return hash;
23373     }
23374 
23375     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23376         com.google.protobuf.ByteString data)
23377         throws com.google.protobuf.InvalidProtocolBufferException {
23378       return PARSER.parseFrom(data);
23379     }
23380     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23381         com.google.protobuf.ByteString data,
23382         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23383         throws com.google.protobuf.InvalidProtocolBufferException {
23384       return PARSER.parseFrom(data, extensionRegistry);
23385     }
23386     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(byte[] data)
23387         throws com.google.protobuf.InvalidProtocolBufferException {
23388       return PARSER.parseFrom(data);
23389     }
23390     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23391         byte[] data,
23392         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23393         throws com.google.protobuf.InvalidProtocolBufferException {
23394       return PARSER.parseFrom(data, extensionRegistry);
23395     }
23396     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(java.io.InputStream input)
23397         throws java.io.IOException {
23398       return PARSER.parseFrom(input);
23399     }
23400     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23401         java.io.InputStream input,
23402         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23403         throws java.io.IOException {
23404       return PARSER.parseFrom(input, extensionRegistry);
23405     }
23406     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(java.io.InputStream input)
23407         throws java.io.IOException {
23408       return PARSER.parseDelimitedFrom(input);
23409     }
23410     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(
23411         java.io.InputStream input,
23412         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23413         throws java.io.IOException {
23414       return PARSER.parseDelimitedFrom(input, extensionRegistry);
23415     }
23416     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23417         com.google.protobuf.CodedInputStream input)
23418         throws java.io.IOException {
23419       return PARSER.parseFrom(input);
23420     }
23421     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23422         com.google.protobuf.CodedInputStream input,
23423         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23424         throws java.io.IOException {
23425       return PARSER.parseFrom(input, extensionRegistry);
23426     }
23427 
23428     public static Builder newBuilder() { return Builder.create(); }
23429     public Builder newBuilderForType() { return newBuilder(); }
23430     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype) {
23431       return newBuilder().mergeFrom(prototype);
23432     }
23433     public Builder toBuilder() { return newBuilder(this); }
23434 
23435     @java.lang.Override
23436     protected Builder newBuilderForType(
23437         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23438       Builder builder = new Builder(parent);
23439       return builder;
23440     }
23441     /**
23442      * Protobuf type {@code hbase.pb.CoprocessorServiceResult}
23443      */
23444     public static final class Builder extends
23445         com.google.protobuf.GeneratedMessage.Builder<Builder>
23446        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder {
23447       public static final com.google.protobuf.Descriptors.Descriptor
23448           getDescriptor() {
23449         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
23450       }
23451 
23452       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
23453           internalGetFieldAccessorTable() {
23454         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable
23455             .ensureFieldAccessorsInitialized(
23456                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
23457       }
23458 
23459       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder()
23460       private Builder() {
23461         maybeForceBuilderInitialization();
23462       }
23463 
23464       private Builder(
23465           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23466         super(parent);
23467         maybeForceBuilderInitialization();
23468       }
23469       private void maybeForceBuilderInitialization() {
23470         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
23471           getValueFieldBuilder();
23472         }
23473       }
23474       private static Builder create() {
23475         return new Builder();
23476       }
23477 
23478       public Builder clear() {
23479         super.clear();
23480         if (valueBuilder_ == null) {
23481           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
23482         } else {
23483           valueBuilder_.clear();
23484         }
23485         bitField0_ = (bitField0_ & ~0x00000001);
23486         return this;
23487       }
23488 
23489       public Builder clone() {
23490         return create().mergeFrom(buildPartial());
23491       }
23492 
23493       public com.google.protobuf.Descriptors.Descriptor
23494           getDescriptorForType() {
23495         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
23496       }
23497 
23498       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() {
23499         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
23500       }
23501 
23502       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult build() {
23503         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = buildPartial();
23504         if (!result.isInitialized()) {
23505           throw newUninitializedMessageException(result);
23506         }
23507         return result;
23508       }
23509 
23510       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult buildPartial() {
23511         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult(this);
23512         int from_bitField0_ = bitField0_;
23513         int to_bitField0_ = 0;
23514         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
23515           to_bitField0_ |= 0x00000001;
23516         }
23517         if (valueBuilder_ == null) {
23518           result.value_ = value_;
23519         } else {
23520           result.value_ = valueBuilder_.build();
23521         }
23522         result.bitField0_ = to_bitField0_;
23523         onBuilt();
23524         return result;
23525       }
23526 
23527       public Builder mergeFrom(com.google.protobuf.Message other) {
23528         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) {
23529           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)other);
23530         } else {
23531           super.mergeFrom(other);
23532           return this;
23533         }
23534       }
23535 
23536       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other) {
23537         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) return this;
23538         if (other.hasValue()) {
23539           mergeValue(other.getValue());
23540         }
23541         this.mergeUnknownFields(other.getUnknownFields());
23542         return this;
23543       }
23544 
23545       public final boolean isInitialized() {
23546         if (hasValue()) {
23547           if (!getValue().isInitialized()) {
23548             
23549             return false;
23550           }
23551         }
23552         return true;
23553       }
23554 
23555       public Builder mergeFrom(
23556           com.google.protobuf.CodedInputStream input,
23557           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23558           throws java.io.IOException {
23559         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parsedMessage = null;
23560         try {
23561           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
23562         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23563           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) e.getUnfinishedMessage();
23564           throw e;
23565         } finally {
23566           if (parsedMessage != null) {
23567             mergeFrom(parsedMessage);
23568           }
23569         }
23570         return this;
23571       }
23572       private int bitField0_;
23573 
23574       // optional .hbase.pb.NameBytesPair value = 1;
23575       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
23576       private com.google.protobuf.SingleFieldBuilder<
23577           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
23578       /**
23579        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23580        */
23581       public boolean hasValue() {
23582         return ((bitField0_ & 0x00000001) == 0x00000001);
23583       }
23584       /**
23585        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23586        */
23587       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
23588         if (valueBuilder_ == null) {
23589           return value_;
23590         } else {
23591           return valueBuilder_.getMessage();
23592         }
23593       }
23594       /**
23595        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23596        */
23597       public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
23598         if (valueBuilder_ == null) {
23599           if (value == null) {
23600             throw new NullPointerException();
23601           }
23602           value_ = value;
23603           onChanged();
23604         } else {
23605           valueBuilder_.setMessage(value);
23606         }
23607         bitField0_ |= 0x00000001;
23608         return this;
23609       }
23610       /**
23611        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23612        */
23613       public Builder setValue(
23614           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
23615         if (valueBuilder_ == null) {
23616           value_ = builderForValue.build();
23617           onChanged();
23618         } else {
23619           valueBuilder_.setMessage(builderForValue.build());
23620         }
23621         bitField0_ |= 0x00000001;
23622         return this;
23623       }
23624       /**
23625        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23626        */
23627       public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
23628         if (valueBuilder_ == null) {
23629           if (((bitField0_ & 0x00000001) == 0x00000001) &&
23630               value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
23631             value_ =
23632               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
23633           } else {
23634             value_ = value;
23635           }
23636           onChanged();
23637         } else {
23638           valueBuilder_.mergeFrom(value);
23639         }
23640         bitField0_ |= 0x00000001;
23641         return this;
23642       }
23643       /**
23644        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23645        */
23646       public Builder clearValue() {
23647         if (valueBuilder_ == null) {
23648           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
23649           onChanged();
23650         } else {
23651           valueBuilder_.clear();
23652         }
23653         bitField0_ = (bitField0_ & ~0x00000001);
23654         return this;
23655       }
23656       /**
23657        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23658        */
23659       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
23660         bitField0_ |= 0x00000001;
23661         onChanged();
23662         return getValueFieldBuilder().getBuilder();
23663       }
23664       /**
23665        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23666        */
23667       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
23668         if (valueBuilder_ != null) {
23669           return valueBuilder_.getMessageOrBuilder();
23670         } else {
23671           return value_;
23672         }
23673       }
23674       /**
23675        * <code>optional .hbase.pb.NameBytesPair value = 1;</code>
23676        */
23677       private com.google.protobuf.SingleFieldBuilder<
23678           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
23679           getValueFieldBuilder() {
23680         if (valueBuilder_ == null) {
23681           valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
23682               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
23683                   value_,
23684                   getParentForChildren(),
23685                   isClean());
23686           value_ = null;
23687         }
23688         return valueBuilder_;
23689       }
23690 
23691       // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResult)
23692     }
23693 
23694     static {
23695       defaultInstance = new CoprocessorServiceResult(true);
23696       defaultInstance.initFields();
23697     }
23698 
23699     // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResult)
23700   }
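  /*
   * Editor's note (not generated code): a minimal serialize/parse round-trip
   * sketch for CoprocessorServiceResult. It assumes HBaseProtos.NameBytesPair
   * exposes the usual generated setName/setValue setters; toByteString() comes
   * from the protobuf runtime, and parseFrom(ByteString) is defined above.
   *
   *   ClientProtos.CoprocessorServiceResult result =
   *       ClientProtos.CoprocessorServiceResult.newBuilder()
   *           .setValue(HBaseProtos.NameBytesPair.newBuilder()
   *               .setName("response")                           // hypothetical pair name
   *               .setValue(com.google.protobuf.ByteString.EMPTY)
   *               .build())
   *           .build();
   *   com.google.protobuf.ByteString wire = result.toByteString();
   *   ClientProtos.CoprocessorServiceResult parsed =
   *       ClientProtos.CoprocessorServiceResult.parseFrom(wire);
   */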
23701 
23702   public interface CoprocessorServiceRequestOrBuilder
23703       extends com.google.protobuf.MessageOrBuilder {
23704 
23705     // required .hbase.pb.RegionSpecifier region = 1;
23706     /**
23707      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
23708      */
23709     boolean hasRegion();
23710     /**
23711      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
23712      */
23713     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
23714     /**
23715      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
23716      */
23717     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
23718 
23719     // required .hbase.pb.CoprocessorServiceCall call = 2;
23720     /**
23721      * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
23722      */
23723     boolean hasCall();
23724     /**
23725      * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
23726      */
23727     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall();
23728     /**
23729      * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
23730      */
23731     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder();
23732   }
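  /*
   * Editor's note (not generated code): CoprocessorServiceRequest pairs the target
   * region with the CoprocessorServiceCall to dispatch. A minimal sketch, assuming
   * the request Builder and HBaseProtos.RegionSpecifier follow the same generated
   * setter pattern as the messages above (setRegion/setCall, setType/setValue):
   *
   *   ClientProtos.CoprocessorServiceRequest request =
   *       ClientProtos.CoprocessorServiceRequest.newBuilder()
   *           .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
   *               .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
   *               .setValue(com.google.protobuf.ByteString.copyFromUtf8("region-name"))  // hypothetical
   *               .build())
   *           .setCall(call)   // a CoprocessorServiceCall built as sketched earlier
   *           .build();
   */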
23733   /**
23734    * Protobuf type {@code hbase.pb.CoprocessorServiceRequest}
23735    */
23736   public static final class CoprocessorServiceRequest extends
23737       com.google.protobuf.GeneratedMessage
23738       implements CoprocessorServiceRequestOrBuilder {
23739     // Use CoprocessorServiceRequest.newBuilder() to construct.
23740     private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
23741       super(builder);
23742       this.unknownFields = builder.getUnknownFields();
23743     }
23744     private CoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
23745 
23746     private static final CoprocessorServiceRequest defaultInstance;
23747     public static CoprocessorServiceRequest getDefaultInstance() {
23748       return defaultInstance;
23749     }
23750 
23751     public CoprocessorServiceRequest getDefaultInstanceForType() {
23752       return defaultInstance;
23753     }
23754 
23755     private final com.google.protobuf.UnknownFieldSet unknownFields;
23756     @java.lang.Override
23757     public final com.google.protobuf.UnknownFieldSet
23758         getUnknownFields() {
23759       return this.unknownFields;
23760     }
23761     private CoprocessorServiceRequest(
23762         com.google.protobuf.CodedInputStream input,
23763         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23764         throws com.google.protobuf.InvalidProtocolBufferException {
23765       initFields();
23766       int mutable_bitField0_ = 0;
23767       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
23768           com.google.protobuf.UnknownFieldSet.newBuilder();
23769       try {
23770         boolean done = false;
23771         while (!done) {
23772           int tag = input.readTag();
23773           switch (tag) {
23774             case 0:
23775               done = true;
23776               break;
23777             default: {
23778               if (!parseUnknownField(input, unknownFields,
23779                                      extensionRegistry, tag)) {
23780                 done = true;
23781               }
23782               break;
23783             }
23784             case 10: {
23785               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
23786               if (((bitField0_ & 0x00000001) == 0x00000001)) {
23787                 subBuilder = region_.toBuilder();
23788               }
23789               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
23790               if (subBuilder != null) {
23791                 subBuilder.mergeFrom(region_);
23792                 region_ = subBuilder.buildPartial();
23793               }
23794               bitField0_ |= 0x00000001;
23795               break;
23796             }
23797             case 18: {
23798               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
23799               if (((bitField0_ & 0x00000002) == 0x00000002)) {
23800                 subBuilder = call_.toBuilder();
23801               }
23802               call_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
23803               if (subBuilder != null) {
23804                 subBuilder.mergeFrom(call_);
23805                 call_ = subBuilder.buildPartial();
23806               }
23807               bitField0_ |= 0x00000002;
23808               break;
23809             }
23810           }
23811         }
23812       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23813         throw e.setUnfinishedMessage(this);
23814       } catch (java.io.IOException e) {
23815         throw new com.google.protobuf.InvalidProtocolBufferException(
23816             e.getMessage()).setUnfinishedMessage(this);
23817       } finally {
23818         this.unknownFields = unknownFields.build();
23819         makeExtensionsImmutable();
23820       }
23821     }
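    // Editorial note (not generated output): the tags handled above follow the protobuf
    // wire format, tag = (field_number << 3) | wire_type. Field 1 (region) with the
    // length-delimited wire type 2 yields tag 10, field 2 (call) yields tag 18, and any
    // unrecognized tag is preserved in unknownFields rather than rejected.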
23822     public static final com.google.protobuf.Descriptors.Descriptor
23823         getDescriptor() {
23824       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
23825     }
23826 
23827     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
23828         internalGetFieldAccessorTable() {
23829       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable
23830           .ensureFieldAccessorsInitialized(
23831               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
23832     }
23833 
23834     public static com.google.protobuf.Parser<CoprocessorServiceRequest> PARSER =
23835         new com.google.protobuf.AbstractParser<CoprocessorServiceRequest>() {
23836       public CoprocessorServiceRequest parsePartialFrom(
23837           com.google.protobuf.CodedInputStream input,
23838           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23839           throws com.google.protobuf.InvalidProtocolBufferException {
23840         return new CoprocessorServiceRequest(input, extensionRegistry);
23841       }
23842     };
23843 
23844     @java.lang.Override
23845     public com.google.protobuf.Parser<CoprocessorServiceRequest> getParserForType() {
23846       return PARSER;
23847     }
23848 
23849     private int bitField0_;
23850     // required .hbase.pb.RegionSpecifier region = 1;
23851     public static final int REGION_FIELD_NUMBER = 1;
23852     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
23853     /**
23854      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
23855      */
23856     public boolean hasRegion() {
23857       return ((bitField0_ & 0x00000001) == 0x00000001);
23858     }
23859     /**
23860      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
23861      */
23862     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
23863       return region_;
23864     }
23865     /**
23866      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
23867      */
23868     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
23869       return region_;
23870     }
23871 
23872     // required .hbase.pb.CoprocessorServiceCall call = 2;
23873     public static final int CALL_FIELD_NUMBER = 2;
23874     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_;
23875     /**
23876      * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
23877      */
23878     public boolean hasCall() {
23879       return ((bitField0_ & 0x00000002) == 0x00000002);
23880     }
23881     /**
23882      * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
23883      */
23884     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
23885       return call_;
23886     }
23887     /**
23888      * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
23889      */
23890     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
23891       return call_;
23892     }
23893 
23894     private void initFields() {
23895       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
23896       call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
23897     }
23898     private byte memoizedIsInitialized = -1;
23899     public final boolean isInitialized() {
23900       byte isInitialized = memoizedIsInitialized;
23901       if (isInitialized != -1) return isInitialized == 1;
23902 
23903       if (!hasRegion()) {
23904         memoizedIsInitialized = 0;
23905         return false;
23906       }
23907       if (!hasCall()) {
23908         memoizedIsInitialized = 0;
23909         return false;
23910       }
23911       if (!getRegion().isInitialized()) {
23912         memoizedIsInitialized = 0;
23913         return false;
23914       }
23915       if (!getCall().isInitialized()) {
23916         memoizedIsInitialized = 0;
23917         return false;
23918       }
23919       memoizedIsInitialized = 1;
23920       return true;
23921     }
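    // Editorial note (not generated output): isInitialized() enforces the two required
    // fields (region, call), recurses into them, and memoizes the result. Builder.build()
    // relies on this check, while buildPartial() skips it. A hedged sketch:
    //
    //   CoprocessorServiceRequest.Builder b = CoprocessorServiceRequest.newBuilder();
    //   b.buildPartial();   // allowed: yields a message whose isInitialized() is false
    //   // b.build();       // would throw an UninitializedMessageException here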
23922 
23923     public void writeTo(com.google.protobuf.CodedOutputStream output)
23924                         throws java.io.IOException {
23925       getSerializedSize();
23926       if (((bitField0_ & 0x00000001) == 0x00000001)) {
23927         output.writeMessage(1, region_);
23928       }
23929       if (((bitField0_ & 0x00000002) == 0x00000002)) {
23930         output.writeMessage(2, call_);
23931       }
23932       getUnknownFields().writeTo(output);
23933     }
23934 
23935     private int memoizedSerializedSize = -1;
23936     public int getSerializedSize() {
23937       int size = memoizedSerializedSize;
23938       if (size != -1) return size;
23939 
23940       size = 0;
23941       if (((bitField0_ & 0x00000001) == 0x00000001)) {
23942         size += com.google.protobuf.CodedOutputStream
23943           .computeMessageSize(1, region_);
23944       }
23945       if (((bitField0_ & 0x00000002) == 0x00000002)) {
23946         size += com.google.protobuf.CodedOutputStream
23947           .computeMessageSize(2, call_);
23948       }
23949       size += getUnknownFields().getSerializedSize();
23950       memoizedSerializedSize = size;
23951       return size;
23952     }
23953 
23954     private static final long serialVersionUID = 0L;
23955     @java.lang.Override
23956     protected java.lang.Object writeReplace()
23957         throws java.io.ObjectStreamException {
23958       return super.writeReplace();
23959     }
23960 
23961     @java.lang.Override
23962     public boolean equals(final java.lang.Object obj) {
23963       if (obj == this) {
23964         return true;
23965       }
23966       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) {
23967         return super.equals(obj);
23968       }
23969       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj;
23970 
23971       boolean result = true;
23972       result = result && (hasRegion() == other.hasRegion());
23973       if (hasRegion()) {
23974         result = result && getRegion()
23975             .equals(other.getRegion());
23976       }
23977       result = result && (hasCall() == other.hasCall());
23978       if (hasCall()) {
23979         result = result && getCall()
23980             .equals(other.getCall());
23981       }
23982       result = result &&
23983           getUnknownFields().equals(other.getUnknownFields());
23984       return result;
23985     }
23986 
23987     private int memoizedHashCode = 0;
23988     @java.lang.Override
23989     public int hashCode() {
23990       if (memoizedHashCode != 0) {
23991         return memoizedHashCode;
23992       }
23993       int hash = 41;
23994       hash = (19 * hash) + getDescriptorForType().hashCode();
23995       if (hasRegion()) {
23996         hash = (37 * hash) + REGION_FIELD_NUMBER;
23997         hash = (53 * hash) + getRegion().hashCode();
23998       }
23999       if (hasCall()) {
24000         hash = (37 * hash) + CALL_FIELD_NUMBER;
24001         hash = (53 * hash) + getCall().hashCode();
24002       }
24003       hash = (29 * hash) + getUnknownFields().hashCode();
24004       memoizedHashCode = hash;
24005       return hash;
24006     }
24007 
24008     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24009         com.google.protobuf.ByteString data)
24010         throws com.google.protobuf.InvalidProtocolBufferException {
24011       return PARSER.parseFrom(data);
24012     }
24013     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24014         com.google.protobuf.ByteString data,
24015         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24016         throws com.google.protobuf.InvalidProtocolBufferException {
24017       return PARSER.parseFrom(data, extensionRegistry);
24018     }
24019     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data)
24020         throws com.google.protobuf.InvalidProtocolBufferException {
24021       return PARSER.parseFrom(data);
24022     }
24023     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24024         byte[] data,
24025         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24026         throws com.google.protobuf.InvalidProtocolBufferException {
24027       return PARSER.parseFrom(data, extensionRegistry);
24028     }
24029     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input)
24030         throws java.io.IOException {
24031       return PARSER.parseFrom(input);
24032     }
24033     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24034         java.io.InputStream input,
24035         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24036         throws java.io.IOException {
24037       return PARSER.parseFrom(input, extensionRegistry);
24038     }
24039     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input)
24040         throws java.io.IOException {
24041       return PARSER.parseDelimitedFrom(input);
24042     }
24043     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(
24044         java.io.InputStream input,
24045         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24046         throws java.io.IOException {
24047       return PARSER.parseDelimitedFrom(input, extensionRegistry);
24048     }
24049     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24050         com.google.protobuf.CodedInputStream input)
24051         throws java.io.IOException {
24052       return PARSER.parseFrom(input);
24053     }
24054     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24055         com.google.protobuf.CodedInputStream input,
24056         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24057         throws java.io.IOException {
24058       return PARSER.parseFrom(input, extensionRegistry);
24059     }
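    // Editorial sketch (not generated output): the static parseFrom/parseDelimitedFrom
    // overloads above all delegate to PARSER. A minimal serialize/parse round trip,
    // assuming `request` is a fully built CoprocessorServiceRequest:
    //
    //   byte[] wire = request.toByteArray();
    //   CoprocessorServiceRequest parsed = CoprocessorServiceRequest.parseFrom(wire);
    //   assert parsed.equals(request) && parsed.hashCode() == request.hashCode();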
24060 
24061     public static Builder newBuilder() { return Builder.create(); }
24062     public Builder newBuilderForType() { return newBuilder(); }
24063     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) {
24064       return newBuilder().mergeFrom(prototype);
24065     }
24066     public Builder toBuilder() { return newBuilder(this); }
24067 
24068     @java.lang.Override
24069     protected Builder newBuilderForType(
24070         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24071       Builder builder = new Builder(parent);
24072       return builder;
24073     }
24074     /**
24075      * Protobuf type {@code hbase.pb.CoprocessorServiceRequest}
24076      */
24077     public static final class Builder extends
24078         com.google.protobuf.GeneratedMessage.Builder<Builder>
24079        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder {
24080       public static final com.google.protobuf.Descriptors.Descriptor
24081           getDescriptor() {
24082         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
24083       }
24084 
24085       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
24086           internalGetFieldAccessorTable() {
24087         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable
24088             .ensureFieldAccessorsInitialized(
24089                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
24090       }
24091 
24092       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder()
24093       private Builder() {
24094         maybeForceBuilderInitialization();
24095       }
24096 
24097       private Builder(
24098           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24099         super(parent);
24100         maybeForceBuilderInitialization();
24101       }
24102       private void maybeForceBuilderInitialization() {
24103         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
24104           getRegionFieldBuilder();
24105           getCallFieldBuilder();
24106         }
24107       }
24108       private static Builder create() {
24109         return new Builder();
24110       }
24111 
24112       public Builder clear() {
24113         super.clear();
24114         if (regionBuilder_ == null) {
24115           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24116         } else {
24117           regionBuilder_.clear();
24118         }
24119         bitField0_ = (bitField0_ & ~0x00000001);
24120         if (callBuilder_ == null) {
24121           call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
24122         } else {
24123           callBuilder_.clear();
24124         }
24125         bitField0_ = (bitField0_ & ~0x00000002);
24126         return this;
24127       }
24128 
24129       public Builder clone() {
24130         return create().mergeFrom(buildPartial());
24131       }
24132 
24133       public com.google.protobuf.Descriptors.Descriptor
24134           getDescriptorForType() {
24135         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
24136       }
24137 
24138       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() {
24139         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
24140       }
24141 
24142       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() {
24143         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial();
24144         if (!result.isInitialized()) {
24145           throw newUninitializedMessageException(result);
24146         }
24147         return result;
24148       }
24149 
24150       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() {
24151         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this);
24152         int from_bitField0_ = bitField0_;
24153         int to_bitField0_ = 0;
24154         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
24155           to_bitField0_ |= 0x00000001;
24156         }
24157         if (regionBuilder_ == null) {
24158           result.region_ = region_;
24159         } else {
24160           result.region_ = regionBuilder_.build();
24161         }
24162         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
24163           to_bitField0_ |= 0x00000002;
24164         }
24165         if (callBuilder_ == null) {
24166           result.call_ = call_;
24167         } else {
24168           result.call_ = callBuilder_.build();
24169         }
24170         result.bitField0_ = to_bitField0_;
24171         onBuilt();
24172         return result;
24173       }
24174 
24175       public Builder mergeFrom(com.google.protobuf.Message other) {
24176         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) {
24177           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other);
24178         } else {
24179           super.mergeFrom(other);
24180           return this;
24181         }
24182       }
24183 
24184       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) {
24185         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this;
24186         if (other.hasRegion()) {
24187           mergeRegion(other.getRegion());
24188         }
24189         if (other.hasCall()) {
24190           mergeCall(other.getCall());
24191         }
24192         this.mergeUnknownFields(other.getUnknownFields());
24193         return this;
24194       }
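      // Editorial note (not generated output): merging copies only the fields that are
      // set on `other`, and the message-typed fields merge recursively through
      // mergeRegion/mergeCall rather than being replaced wholesale, in line with
      // standard protobuf merge semantics.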
24195 
24196       public final boolean isInitialized() {
24197         if (!hasRegion()) {
24198           
24199           return false;
24200         }
24201         if (!hasCall()) {
24202           
24203           return false;
24204         }
24205         if (!getRegion().isInitialized()) {
24206           
24207           return false;
24208         }
24209         if (!getCall().isInitialized()) {
24210           
24211           return false;
24212         }
24213         return true;
24214       }
24215 
24216       public Builder mergeFrom(
24217           com.google.protobuf.CodedInputStream input,
24218           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24219           throws java.io.IOException {
24220         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parsedMessage = null;
24221         try {
24222           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
24223         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24224           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage();
24225           throw e;
24226         } finally {
24227           if (parsedMessage != null) {
24228             mergeFrom(parsedMessage);
24229           }
24230         }
24231         return this;
24232       }
24233       private int bitField0_;
24234 
24235       // required .hbase.pb.RegionSpecifier region = 1;
24236       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24237       private com.google.protobuf.SingleFieldBuilder<
24238           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
24239       /**
24240        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24241        */
24242       public boolean hasRegion() {
24243         return ((bitField0_ & 0x00000001) == 0x00000001);
24244       }
24245       /**
24246        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24247        */
24248       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
24249         if (regionBuilder_ == null) {
24250           return region_;
24251         } else {
24252           return regionBuilder_.getMessage();
24253         }
24254       }
24255       /**
24256        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24257        */
24258       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
24259         if (regionBuilder_ == null) {
24260           if (value == null) {
24261             throw new NullPointerException();
24262           }
24263           region_ = value;
24264           onChanged();
24265         } else {
24266           regionBuilder_.setMessage(value);
24267         }
24268         bitField0_ |= 0x00000001;
24269         return this;
24270       }
24271       /**
24272        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24273        */
24274       public Builder setRegion(
24275           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
24276         if (regionBuilder_ == null) {
24277           region_ = builderForValue.build();
24278           onChanged();
24279         } else {
24280           regionBuilder_.setMessage(builderForValue.build());
24281         }
24282         bitField0_ |= 0x00000001;
24283         return this;
24284       }
24285       /**
24286        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24287        */
24288       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
24289         if (regionBuilder_ == null) {
24290           if (((bitField0_ & 0x00000001) == 0x00000001) &&
24291               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
24292             region_ =
24293               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
24294           } else {
24295             region_ = value;
24296           }
24297           onChanged();
24298         } else {
24299           regionBuilder_.mergeFrom(value);
24300         }
24301         bitField0_ |= 0x00000001;
24302         return this;
24303       }
24304       /**
24305        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24306        */
24307       public Builder clearRegion() {
24308         if (regionBuilder_ == null) {
24309           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24310           onChanged();
24311         } else {
24312           regionBuilder_.clear();
24313         }
24314         bitField0_ = (bitField0_ & ~0x00000001);
24315         return this;
24316       }
24317       /**
24318        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24319        */
24320       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
24321         bitField0_ |= 0x00000001;
24322         onChanged();
24323         return getRegionFieldBuilder().getBuilder();
24324       }
24325       /**
24326        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24327        */
24328       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
24329         if (regionBuilder_ != null) {
24330           return regionBuilder_.getMessageOrBuilder();
24331         } else {
24332           return region_;
24333         }
24334       }
24335       /**
24336        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24337        */
24338       private com.google.protobuf.SingleFieldBuilder<
24339           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
24340           getRegionFieldBuilder() {
24341         if (regionBuilder_ == null) {
24342           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
24343               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
24344                   region_,
24345                   getParentForChildren(),
24346                   isClean());
24347           region_ = null;
24348         }
24349         return regionBuilder_;
24350       }
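      // Editorial note (not generated output): the SingleFieldBuilder above is created
      // lazily. Until getRegionBuilder()/getRegionFieldBuilder() is first called, the
      // plain region_ message backs the field and regionBuilder_ stays null; once the
      // field builder exists it takes ownership (region_ is nulled out) and later
      // get/set/merge/clear calls route through it. The call field below follows the
      // same pattern.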
24351 
24352       // required .hbase.pb.CoprocessorServiceCall call = 2;
24353       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
24354       private com.google.protobuf.SingleFieldBuilder<
24355           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_;
24356       /**
24357        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24358        */
24359       public boolean hasCall() {
24360         return ((bitField0_ & 0x00000002) == 0x00000002);
24361       }
24362       /**
24363        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24364        */
24365       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
24366         if (callBuilder_ == null) {
24367           return call_;
24368         } else {
24369           return callBuilder_.getMessage();
24370         }
24371       }
24372       /**
24373        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24374        */
24375       public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
24376         if (callBuilder_ == null) {
24377           if (value == null) {
24378             throw new NullPointerException();
24379           }
24380           call_ = value;
24381           onChanged();
24382         } else {
24383           callBuilder_.setMessage(value);
24384         }
24385         bitField0_ |= 0x00000002;
24386         return this;
24387       }
24388       /**
24389        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24390        */
24391       public Builder setCall(
24392           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
24393         if (callBuilder_ == null) {
24394           call_ = builderForValue.build();
24395           onChanged();
24396         } else {
24397           callBuilder_.setMessage(builderForValue.build());
24398         }
24399         bitField0_ |= 0x00000002;
24400         return this;
24401       }
24402       /**
24403        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24404        */
24405       public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
24406         if (callBuilder_ == null) {
24407           if (((bitField0_ & 0x00000002) == 0x00000002) &&
24408               call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
24409             call_ =
24410               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial();
24411           } else {
24412             call_ = value;
24413           }
24414           onChanged();
24415         } else {
24416           callBuilder_.mergeFrom(value);
24417         }
24418         bitField0_ |= 0x00000002;
24419         return this;
24420       }
24421       /**
24422        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24423        */
24424       public Builder clearCall() {
24425         if (callBuilder_ == null) {
24426           call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
24427           onChanged();
24428         } else {
24429           callBuilder_.clear();
24430         }
24431         bitField0_ = (bitField0_ & ~0x00000002);
24432         return this;
24433       }
24434       /**
24435        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24436        */
24437       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() {
24438         bitField0_ |= 0x00000002;
24439         onChanged();
24440         return getCallFieldBuilder().getBuilder();
24441       }
24442       /**
24443        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24444        */
24445       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
24446         if (callBuilder_ != null) {
24447           return callBuilder_.getMessageOrBuilder();
24448         } else {
24449           return call_;
24450         }
24451       }
24452       /**
24453        * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
24454        */
24455       private com.google.protobuf.SingleFieldBuilder<
24456           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> 
24457           getCallFieldBuilder() {
24458         if (callBuilder_ == null) {
24459           callBuilder_ = new com.google.protobuf.SingleFieldBuilder<
24460               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
24461                   call_,
24462                   getParentForChildren(),
24463                   isClean());
24464           call_ = null;
24465         }
24466         return callBuilder_;
24467       }
24468 
24469       // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceRequest)
24470     }
24471 
24472     static {
24473       defaultInstance = new CoprocessorServiceRequest(true);
24474       defaultInstance.initFields();
24475     }
24476 
24477     // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceRequest)
24478   }
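  // Editorial usage sketch (not part of the generated file): building a complete
  // CoprocessorServiceRequest. The service name, method name, row key and payload are
  // hypothetical placeholders; field names follow Client.proto and HBase.proto.
  //
  //   import com.google.protobuf.ByteString;
  //   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
  //
  //   CoprocessorServiceRequest request = CoprocessorServiceRequest.newBuilder()
  //       .setRegion(RegionSpecifier.newBuilder()
  //           .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)
  //           .setValue(ByteString.copyFromUtf8("example-region-name")))
  //       .setCall(CoprocessorServiceCall.newBuilder()
  //           .setRow(ByteString.EMPTY)              // hypothetical row key
  //           .setServiceName("ExampleService")      // hypothetical coprocessor service
  //           .setMethodName("exampleMethod")        // hypothetical method
  //           .setRequest(ByteString.EMPTY))         // serialized method argument
  //       .build();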
24479 
24480   public interface CoprocessorServiceResponseOrBuilder
24481       extends com.google.protobuf.MessageOrBuilder {
24482 
24483     // required .hbase.pb.RegionSpecifier region = 1;
24484     /**
24485      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24486      */
24487     boolean hasRegion();
24488     /**
24489      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24490      */
24491     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
24492     /**
24493      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24494      */
24495     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
24496 
24497     // required .hbase.pb.NameBytesPair value = 2;
24498     /**
24499      * <code>required .hbase.pb.NameBytesPair value = 2;</code>
24500      */
24501     boolean hasValue();
24502     /**
24503      * <code>required .hbase.pb.NameBytesPair value = 2;</code>
24504      */
24505     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
24506     /**
24507      * <code>required .hbase.pb.NameBytesPair value = 2;</code>
24508      */
24509     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
24510   }
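  // Editorial note (not generated output): the response mirrors the request shape.
  // `region` echoes the region that served the call, and `value` is a NameBytesPair
  // whose bytes carry the serialized coprocessor result. A hedged read-side sketch,
  // assuming `response` is a parsed CoprocessorServiceResponse:
  //
  //   org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair pair =
  //       response.getValue();
  //   com.google.protobuf.ByteString payload = pair.getValue();   // raw result bytes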
24511   /**
24512    * Protobuf type {@code hbase.pb.CoprocessorServiceResponse}
24513    */
24514   public static final class CoprocessorServiceResponse extends
24515       com.google.protobuf.GeneratedMessage
24516       implements CoprocessorServiceResponseOrBuilder {
24517     // Use CoprocessorServiceResponse.newBuilder() to construct.
24518     private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
24519       super(builder);
24520       this.unknownFields = builder.getUnknownFields();
24521     }
24522     private CoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
24523 
24524     private static final CoprocessorServiceResponse defaultInstance;
24525     public static CoprocessorServiceResponse getDefaultInstance() {
24526       return defaultInstance;
24527     }
24528 
24529     public CoprocessorServiceResponse getDefaultInstanceForType() {
24530       return defaultInstance;
24531     }
24532 
24533     private final com.google.protobuf.UnknownFieldSet unknownFields;
24534     @java.lang.Override
24535     public final com.google.protobuf.UnknownFieldSet
24536         getUnknownFields() {
24537       return this.unknownFields;
24538     }
24539     private CoprocessorServiceResponse(
24540         com.google.protobuf.CodedInputStream input,
24541         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24542         throws com.google.protobuf.InvalidProtocolBufferException {
24543       initFields();
24544       int mutable_bitField0_ = 0;
24545       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
24546           com.google.protobuf.UnknownFieldSet.newBuilder();
24547       try {
24548         boolean done = false;
24549         while (!done) {
24550           int tag = input.readTag();
24551           switch (tag) {
24552             case 0:
24553               done = true;
24554               break;
24555             default: {
24556               if (!parseUnknownField(input, unknownFields,
24557                                      extensionRegistry, tag)) {
24558                 done = true;
24559               }
24560               break;
24561             }
24562             case 10: {
24563               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
24564               if (((bitField0_ & 0x00000001) == 0x00000001)) {
24565                 subBuilder = region_.toBuilder();
24566               }
24567               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
24568               if (subBuilder != null) {
24569                 subBuilder.mergeFrom(region_);
24570                 region_ = subBuilder.buildPartial();
24571               }
24572               bitField0_ |= 0x00000001;
24573               break;
24574             }
24575             case 18: {
24576               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
24577               if (((bitField0_ & 0x00000002) == 0x00000002)) {
24578                 subBuilder = value_.toBuilder();
24579               }
24580               value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
24581               if (subBuilder != null) {
24582                 subBuilder.mergeFrom(value_);
24583                 value_ = subBuilder.buildPartial();
24584               }
24585               bitField0_ |= 0x00000002;
24586               break;
24587             }
24588           }
24589         }
24590       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24591         throw e.setUnfinishedMessage(this);
24592       } catch (java.io.IOException e) {
24593         throw new com.google.protobuf.InvalidProtocolBufferException(
24594             e.getMessage()).setUnfinishedMessage(this);
24595       } finally {
24596         this.unknownFields = unknownFields.build();
24597         makeExtensionsImmutable();
24598       }
24599     }
24600     public static final com.google.protobuf.Descriptors.Descriptor
24601         getDescriptor() {
24602       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
24603     }
24604 
24605     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
24606         internalGetFieldAccessorTable() {
24607       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable
24608           .ensureFieldAccessorsInitialized(
24609               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
24610     }
24611 
24612     public static com.google.protobuf.Parser<CoprocessorServiceResponse> PARSER =
24613         new com.google.protobuf.AbstractParser<CoprocessorServiceResponse>() {
24614       public CoprocessorServiceResponse parsePartialFrom(
24615           com.google.protobuf.CodedInputStream input,
24616           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24617           throws com.google.protobuf.InvalidProtocolBufferException {
24618         return new CoprocessorServiceResponse(input, extensionRegistry);
24619       }
24620     };
24621 
24622     @java.lang.Override
24623     public com.google.protobuf.Parser<CoprocessorServiceResponse> getParserForType() {
24624       return PARSER;
24625     }
24626 
24627     private int bitField0_;
24628     // required .hbase.pb.RegionSpecifier region = 1;
24629     public static final int REGION_FIELD_NUMBER = 1;
24630     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
24631     /**
24632      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24633      */
24634     public boolean hasRegion() {
24635       return ((bitField0_ & 0x00000001) == 0x00000001);
24636     }
24637     /**
24638      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24639      */
24640     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
24641       return region_;
24642     }
24643     /**
24644      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
24645      */
24646     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
24647       return region_;
24648     }
24649 
24650     // required .hbase.pb.NameBytesPair value = 2;
24651     public static final int VALUE_FIELD_NUMBER = 2;
24652     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
24653     /**
24654      * <code>required .hbase.pb.NameBytesPair value = 2;</code>
24655      */
24656     public boolean hasValue() {
24657       return ((bitField0_ & 0x00000002) == 0x00000002);
24658     }
24659     /**
24660      * <code>required .hbase.pb.NameBytesPair value = 2;</code>
24661      */
24662     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
24663       return value_;
24664     }
24665     /**
24666      * <code>required .hbase.pb.NameBytesPair value = 2;</code>
24667      */
24668     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
24669       return value_;
24670     }
24671 
24672     private void initFields() {
24673       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24674       value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24675     }
24676     private byte memoizedIsInitialized = -1;
24677     public final boolean isInitialized() {
24678       byte isInitialized = memoizedIsInitialized;
24679       if (isInitialized != -1) return isInitialized == 1;
24680 
24681       if (!hasRegion()) {
24682         memoizedIsInitialized = 0;
24683         return false;
24684       }
24685       if (!hasValue()) {
24686         memoizedIsInitialized = 0;
24687         return false;
24688       }
24689       if (!getRegion().isInitialized()) {
24690         memoizedIsInitialized = 0;
24691         return false;
24692       }
24693       if (!getValue().isInitialized()) {
24694         memoizedIsInitialized = 0;
24695         return false;
24696       }
24697       memoizedIsInitialized = 1;
24698       return true;
24699     }
24700 
24701     public void writeTo(com.google.protobuf.CodedOutputStream output)
24702                         throws java.io.IOException {
24703       getSerializedSize();
24704       if (((bitField0_ & 0x00000001) == 0x00000001)) {
24705         output.writeMessage(1, region_);
24706       }
24707       if (((bitField0_ & 0x00000002) == 0x00000002)) {
24708         output.writeMessage(2, value_);
24709       }
24710       getUnknownFields().writeTo(output);
24711     }
24712 
24713     private int memoizedSerializedSize = -1;
24714     public int getSerializedSize() {
24715       int size = memoizedSerializedSize;
24716       if (size != -1) return size;
24717 
24718       size = 0;
24719       if (((bitField0_ & 0x00000001) == 0x00000001)) {
24720         size += com.google.protobuf.CodedOutputStream
24721           .computeMessageSize(1, region_);
24722       }
24723       if (((bitField0_ & 0x00000002) == 0x00000002)) {
24724         size += com.google.protobuf.CodedOutputStream
24725           .computeMessageSize(2, value_);
24726       }
24727       size += getUnknownFields().getSerializedSize();
24728       memoizedSerializedSize = size;
24729       return size;
24730     }
24731 
24732     private static final long serialVersionUID = 0L;
24733     @java.lang.Override
24734     protected java.lang.Object writeReplace()
24735         throws java.io.ObjectStreamException {
24736       return super.writeReplace();
24737     }
24738 
24739     @java.lang.Override
24740     public boolean equals(final java.lang.Object obj) {
24741       if (obj == this) {
24742         return true;
24743       }
24744       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) {
24745         return super.equals(obj);
24746       }
24747       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj;
24748 
24749       boolean result = true;
24750       result = result && (hasRegion() == other.hasRegion());
24751       if (hasRegion()) {
24752         result = result && getRegion()
24753             .equals(other.getRegion());
24754       }
24755       result = result && (hasValue() == other.hasValue());
24756       if (hasValue()) {
24757         result = result && getValue()
24758             .equals(other.getValue());
24759       }
24760       result = result &&
24761           getUnknownFields().equals(other.getUnknownFields());
24762       return result;
24763     }
24764 
24765     private int memoizedHashCode = 0;
24766     @java.lang.Override
24767     public int hashCode() {
24768       if (memoizedHashCode != 0) {
24769         return memoizedHashCode;
24770       }
24771       int hash = 41;
24772       hash = (19 * hash) + getDescriptorForType().hashCode();
24773       if (hasRegion()) {
24774         hash = (37 * hash) + REGION_FIELD_NUMBER;
24775         hash = (53 * hash) + getRegion().hashCode();
24776       }
24777       if (hasValue()) {
24778         hash = (37 * hash) + VALUE_FIELD_NUMBER;
24779         hash = (53 * hash) + getValue().hashCode();
24780       }
24781       hash = (29 * hash) + getUnknownFields().hashCode();
24782       memoizedHashCode = hash;
24783       return hash;
24784     }
24785 
24786     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
24787         com.google.protobuf.ByteString data)
24788         throws com.google.protobuf.InvalidProtocolBufferException {
24789       return PARSER.parseFrom(data);
24790     }
24791     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
24792         com.google.protobuf.ByteString data,
24793         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24794         throws com.google.protobuf.InvalidProtocolBufferException {
24795       return PARSER.parseFrom(data, extensionRegistry);
24796     }
24797     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data)
24798         throws com.google.protobuf.InvalidProtocolBufferException {
24799       return PARSER.parseFrom(data);
24800     }
24801     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
24802         byte[] data,
24803         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24804         throws com.google.protobuf.InvalidProtocolBufferException {
24805       return PARSER.parseFrom(data, extensionRegistry);
24806     }
24807     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input)
24808         throws java.io.IOException {
24809       return PARSER.parseFrom(input);
24810     }
24811     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
24812         java.io.InputStream input,
24813         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24814         throws java.io.IOException {
24815       return PARSER.parseFrom(input, extensionRegistry);
24816     }
24817     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input)
24818         throws java.io.IOException {
24819       return PARSER.parseDelimitedFrom(input);
24820     }
24821     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(
24822         java.io.InputStream input,
24823         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24824         throws java.io.IOException {
24825       return PARSER.parseDelimitedFrom(input, extensionRegistry);
24826     }
24827     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
24828         com.google.protobuf.CodedInputStream input)
24829         throws java.io.IOException {
24830       return PARSER.parseFrom(input);
24831     }
24832     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
24833         com.google.protobuf.CodedInputStream input,
24834         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24835         throws java.io.IOException {
24836       return PARSER.parseFrom(input, extensionRegistry);
24837     }
24838 
24839     public static Builder newBuilder() { return Builder.create(); }
24840     public Builder newBuilderForType() { return newBuilder(); }
24841     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) {
24842       return newBuilder().mergeFrom(prototype);
24843     }
24844     public Builder toBuilder() { return newBuilder(this); }
24845 
24846     @java.lang.Override
24847     protected Builder newBuilderForType(
24848         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24849       Builder builder = new Builder(parent);
24850       return builder;
24851     }
24852     /**
24853      * Protobuf type {@code hbase.pb.CoprocessorServiceResponse}
24854      */
24855     public static final class Builder extends
24856         com.google.protobuf.GeneratedMessage.Builder<Builder>
24857        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder {
24858       public static final com.google.protobuf.Descriptors.Descriptor
24859           getDescriptor() {
24860         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
24861       }
24862 
24863       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
24864           internalGetFieldAccessorTable() {
24865         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable
24866             .ensureFieldAccessorsInitialized(
24867                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
24868       }
24869 
24870       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder()
24871       private Builder() {
24872         maybeForceBuilderInitialization();
24873       }
24874 
24875       private Builder(
24876           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24877         super(parent);
24878         maybeForceBuilderInitialization();
24879       }
24880       private void maybeForceBuilderInitialization() {
24881         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
24882           getRegionFieldBuilder();
24883           getValueFieldBuilder();
24884         }
24885       }
24886       private static Builder create() {
24887         return new Builder();
24888       }
24889 
24890       public Builder clear() {
24891         super.clear();
24892         if (regionBuilder_ == null) {
24893           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24894         } else {
24895           regionBuilder_.clear();
24896         }
24897         bitField0_ = (bitField0_ & ~0x00000001);
24898         if (valueBuilder_ == null) {
24899           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24900         } else {
24901           valueBuilder_.clear();
24902         }
24903         bitField0_ = (bitField0_ & ~0x00000002);
24904         return this;
24905       }
24906 
24907       public Builder clone() {
24908         return create().mergeFrom(buildPartial());
24909       }
24910 
24911       public com.google.protobuf.Descriptors.Descriptor
24912           getDescriptorForType() {
24913         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
24914       }
24915 
24916       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() {
24917         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
24918       }
24919 
24920       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() {
24921         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial();
24922         if (!result.isInitialized()) {
24923           throw newUninitializedMessageException(result);
24924         }
24925         return result;
24926       }
24927 
24928       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() {
24929         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this);
24930         int from_bitField0_ = bitField0_;
24931         int to_bitField0_ = 0;
24932         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
24933           to_bitField0_ |= 0x00000001;
24934         }
24935         if (regionBuilder_ == null) {
24936           result.region_ = region_;
24937         } else {
24938           result.region_ = regionBuilder_.build();
24939         }
24940         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
24941           to_bitField0_ |= 0x00000002;
24942         }
24943         if (valueBuilder_ == null) {
24944           result.value_ = value_;
24945         } else {
24946           result.value_ = valueBuilder_.build();
24947         }
24948         result.bitField0_ = to_bitField0_;
24949         onBuilt();
24950         return result;
24951       }
24952 
24953       public Builder mergeFrom(com.google.protobuf.Message other) {
24954         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) {
24955           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other);
24956         } else {
24957           super.mergeFrom(other);
24958           return this;
24959         }
24960       }
24961 
24962       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) {
24963         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this;
24964         if (other.hasRegion()) {
24965           mergeRegion(other.getRegion());
24966         }
24967         if (other.hasValue()) {
24968           mergeValue(other.getValue());
24969         }
24970         this.mergeUnknownFields(other.getUnknownFields());
24971         return this;
24972       }
24973 
24974       public final boolean isInitialized() {
24975         if (!hasRegion()) {
24976           
24977           return false;
24978         }
24979         if (!hasValue()) {
24980           
24981           return false;
24982         }
24983         if (!getRegion().isInitialized()) {
24984           
24985           return false;
24986         }
24987         if (!getValue().isInitialized()) {
24988           
24989           return false;
24990         }
24991         return true;
24992       }
24993 
24994       public Builder mergeFrom(
24995           com.google.protobuf.CodedInputStream input,
24996           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24997           throws java.io.IOException {
24998         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parsedMessage = null;
24999         try {
25000           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
25001         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25002           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage();
25003           throw e;
25004         } finally {
25005           if (parsedMessage != null) {
25006             mergeFrom(parsedMessage);
25007           }
25008         }
25009         return this;
25010       }
25011       private int bitField0_;
25012 
25013       // required .hbase.pb.RegionSpecifier region = 1;
25014       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25015       private com.google.protobuf.SingleFieldBuilder<
25016           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
25017       /**
25018        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25019        */
25020       public boolean hasRegion() {
25021         return ((bitField0_ & 0x00000001) == 0x00000001);
25022       }
25023       /**
25024        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25025        */
25026       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
25027         if (regionBuilder_ == null) {
25028           return region_;
25029         } else {
25030           return regionBuilder_.getMessage();
25031         }
25032       }
25033       /**
25034        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25035        */
25036       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
25037         if (regionBuilder_ == null) {
25038           if (value == null) {
25039             throw new NullPointerException();
25040           }
25041           region_ = value;
25042           onChanged();
25043         } else {
25044           regionBuilder_.setMessage(value);
25045         }
25046         bitField0_ |= 0x00000001;
25047         return this;
25048       }
25049       /**
25050        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25051        */
25052       public Builder setRegion(
25053           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
25054         if (regionBuilder_ == null) {
25055           region_ = builderForValue.build();
25056           onChanged();
25057         } else {
25058           regionBuilder_.setMessage(builderForValue.build());
25059         }
25060         bitField0_ |= 0x00000001;
25061         return this;
25062       }
25063       /**
25064        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25065        */
25066       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
25067         if (regionBuilder_ == null) {
25068           if (((bitField0_ & 0x00000001) == 0x00000001) &&
25069               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
25070             region_ =
25071               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
25072           } else {
25073             region_ = value;
25074           }
25075           onChanged();
25076         } else {
25077           regionBuilder_.mergeFrom(value);
25078         }
25079         bitField0_ |= 0x00000001;
25080         return this;
25081       }
25082       /**
25083        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25084        */
25085       public Builder clearRegion() {
25086         if (regionBuilder_ == null) {
25087           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25088           onChanged();
25089         } else {
25090           regionBuilder_.clear();
25091         }
25092         bitField0_ = (bitField0_ & ~0x00000001);
25093         return this;
25094       }
25095       /**
25096        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25097        */
25098       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
25099         bitField0_ |= 0x00000001;
25100         onChanged();
25101         return getRegionFieldBuilder().getBuilder();
25102       }
25103       /**
25104        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25105        */
25106       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
25107         if (regionBuilder_ != null) {
25108           return regionBuilder_.getMessageOrBuilder();
25109         } else {
25110           return region_;
25111         }
25112       }
25113       /**
25114        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
25115        */
25116       private com.google.protobuf.SingleFieldBuilder<
25117           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
25118           getRegionFieldBuilder() {
25119         if (regionBuilder_ == null) {
25120           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
25121               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
25122                   region_,
25123                   getParentForChildren(),
25124                   isClean());
25125           region_ = null;
25126         }
25127         return regionBuilder_;
25128       }
25129 
25130       // required .hbase.pb.NameBytesPair value = 2;
25131       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
25132       private com.google.protobuf.SingleFieldBuilder<
25133           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
25134       /**
25135        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25136        */
25137       public boolean hasValue() {
25138         return ((bitField0_ & 0x00000002) == 0x00000002);
25139       }
25140       /**
25141        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25142        */
25143       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
25144         if (valueBuilder_ == null) {
25145           return value_;
25146         } else {
25147           return valueBuilder_.getMessage();
25148         }
25149       }
25150       /**
25151        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25152        */
25153       public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
25154         if (valueBuilder_ == null) {
25155           if (value == null) {
25156             throw new NullPointerException();
25157           }
25158           value_ = value;
25159           onChanged();
25160         } else {
25161           valueBuilder_.setMessage(value);
25162         }
25163         bitField0_ |= 0x00000002;
25164         return this;
25165       }
25166       /**
25167        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25168        */
25169       public Builder setValue(
25170           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
25171         if (valueBuilder_ == null) {
25172           value_ = builderForValue.build();
25173           onChanged();
25174         } else {
25175           valueBuilder_.setMessage(builderForValue.build());
25176         }
25177         bitField0_ |= 0x00000002;
25178         return this;
25179       }
25180       /**
25181        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25182        */
25183       public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
25184         if (valueBuilder_ == null) {
25185           if (((bitField0_ & 0x00000002) == 0x00000002) &&
25186               value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
25187             value_ =
25188               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
25189           } else {
25190             value_ = value;
25191           }
25192           onChanged();
25193         } else {
25194           valueBuilder_.mergeFrom(value);
25195         }
25196         bitField0_ |= 0x00000002;
25197         return this;
25198       }
25199       /**
25200        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25201        */
25202       public Builder clearValue() {
25203         if (valueBuilder_ == null) {
25204           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
25205           onChanged();
25206         } else {
25207           valueBuilder_.clear();
25208         }
25209         bitField0_ = (bitField0_ & ~0x00000002);
25210         return this;
25211       }
25212       /**
25213        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25214        */
25215       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
25216         bitField0_ |= 0x00000002;
25217         onChanged();
25218         return getValueFieldBuilder().getBuilder();
25219       }
25220       /**
25221        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25222        */
25223       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
25224         if (valueBuilder_ != null) {
25225           return valueBuilder_.getMessageOrBuilder();
25226         } else {
25227           return value_;
25228         }
25229       }
25230       /**
25231        * <code>required .hbase.pb.NameBytesPair value = 2;</code>
25232        */
25233       private com.google.protobuf.SingleFieldBuilder<
25234           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
25235           getValueFieldBuilder() {
25236         if (valueBuilder_ == null) {
25237           valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
25238               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
25239                   value_,
25240                   getParentForChildren(),
25241                   isClean());
25242           value_ = null;
25243         }
25244         return valueBuilder_;
25245       }
25246 
25247       // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResponse)
25248     }
25249 
25250     static {
25251       defaultInstance = new CoprocessorServiceResponse(true);
25252       defaultInstance.initFields();
25253     }
25254 
25255     // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResponse)
25256   }
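  /*
   * CoprocessorServiceResponse pairs the region that served a coprocessor call with the
   * result wrapped as a NameBytesPair. A minimal read-side sketch, assuming `respBytes`
   * holds a serialized response (placeholder name):
   *
   *   CoprocessorServiceResponse resp = CoprocessorServiceResponse.parseFrom(respBytes);
   *   HBaseProtos.RegionSpecifier region = resp.getRegion();
   *   com.google.protobuf.ByteString raw = resp.getValue().getValue();
   *   // raw is typically the serialized return message of the invoked coprocessor method
   */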
25257 
25258   public interface ActionOrBuilder
25259       extends com.google.protobuf.MessageOrBuilder {
25260 
25261     // optional uint32 index = 1;
25262     /**
25263      * <code>optional uint32 index = 1;</code>
25264      *
25265      * <pre>
25266      * If part of a multi action, useful for aligning the
25267      * result with what was originally submitted.
25268      * </pre>
25269      */
25270     boolean hasIndex();
25271     /**
25272      * <code>optional uint32 index = 1;</code>
25273      *
25274      * <pre>
25275      * If part of a multi action, useful for aligning the
25276      * result with what was originally submitted.
25277      * </pre>
25278      */
25279     int getIndex();
25280 
25281     // optional .hbase.pb.MutationProto mutation = 2;
25282     /**
25283      * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
25284      */
25285     boolean hasMutation();
25286     /**
25287      * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
25288      */
25289     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
25290     /**
25291      * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
25292      */
25293     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
25294 
25295     // optional .hbase.pb.Get get = 3;
25296     /**
25297      * <code>optional .hbase.pb.Get get = 3;</code>
25298      */
25299     boolean hasGet();
25300     /**
25301      * <code>optional .hbase.pb.Get get = 3;</code>
25302      */
25303     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
25304     /**
25305      * <code>optional .hbase.pb.Get get = 3;</code>
25306      */
25307     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
25308 
25309     // optional .hbase.pb.CoprocessorServiceCall service_call = 4;
25310     /**
25311      * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
25312      */
25313     boolean hasServiceCall();
25314     /**
25315      * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
25316      */
25317     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall();
25318     /**
25319      * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
25320      */
25321     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder();
25322   }
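  /*
   * Action carries one operation (a Get, a Mutation, or a coprocessor service call) plus an
   * optional index used to line results back up with the submitted batch. A minimal builder
   * sketch, assuming `get` is an already-built ClientProtos.Get (placeholder name):
   *
   *   ClientProtos.Action action = ClientProtos.Action.newBuilder()
   *       .setIndex(3)
   *       .setGet(get)
   *       .build();
   */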
25323   /**
25324    * Protobuf type {@code hbase.pb.Action}
25325    *
25326    * <pre>
25327    * Either a Get or a Mutation
25328    * </pre>
25329    */
25330   public static final class Action extends
25331       com.google.protobuf.GeneratedMessage
25332       implements ActionOrBuilder {
25333     // Use Action.newBuilder() to construct.
25334     private Action(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
25335       super(builder);
25336       this.unknownFields = builder.getUnknownFields();
25337     }
25338     private Action(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
25339 
25340     private static final Action defaultInstance;
25341     public static Action getDefaultInstance() {
25342       return defaultInstance;
25343     }
25344 
25345     public Action getDefaultInstanceForType() {
25346       return defaultInstance;
25347     }
25348 
25349     private final com.google.protobuf.UnknownFieldSet unknownFields;
25350     @java.lang.Override
25351     public final com.google.protobuf.UnknownFieldSet
25352         getUnknownFields() {
25353       return this.unknownFields;
25354     }
25355     private Action(
25356         com.google.protobuf.CodedInputStream input,
25357         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25358         throws com.google.protobuf.InvalidProtocolBufferException {
25359       initFields();
25360       int mutable_bitField0_ = 0;
25361       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
25362           com.google.protobuf.UnknownFieldSet.newBuilder();
25363       try {
25364         boolean done = false;
25365         while (!done) {
25366           int tag = input.readTag();
25367           switch (tag) {
25368             case 0:
25369               done = true;
25370               break;
25371             default: {
25372               if (!parseUnknownField(input, unknownFields,
25373                                      extensionRegistry, tag)) {
25374                 done = true;
25375               }
25376               break;
25377             }
25378             case 8: {
25379               bitField0_ |= 0x00000001;
25380               index_ = input.readUInt32();
25381               break;
25382             }
25383             case 18: {
25384               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
25385               if (((bitField0_ & 0x00000002) == 0x00000002)) {
25386                 subBuilder = mutation_.toBuilder();
25387               }
25388               mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
25389               if (subBuilder != null) {
25390                 subBuilder.mergeFrom(mutation_);
25391                 mutation_ = subBuilder.buildPartial();
25392               }
25393               bitField0_ |= 0x00000002;
25394               break;
25395             }
25396             case 26: {
25397               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
25398               if (((bitField0_ & 0x00000004) == 0x00000004)) {
25399                 subBuilder = get_.toBuilder();
25400               }
25401               get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
25402               if (subBuilder != null) {
25403                 subBuilder.mergeFrom(get_);
25404                 get_ = subBuilder.buildPartial();
25405               }
25406               bitField0_ |= 0x00000004;
25407               break;
25408             }
25409             case 34: {
25410               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
25411               if (((bitField0_ & 0x00000008) == 0x00000008)) {
25412                 subBuilder = serviceCall_.toBuilder();
25413               }
25414               serviceCall_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
25415               if (subBuilder != null) {
25416                 subBuilder.mergeFrom(serviceCall_);
25417                 serviceCall_ = subBuilder.buildPartial();
25418               }
25419               bitField0_ |= 0x00000008;
25420               break;
25421             }
25422           }
25423         }
25424       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25425         throw e.setUnfinishedMessage(this);
25426       } catch (java.io.IOException e) {
25427         throw new com.google.protobuf.InvalidProtocolBufferException(
25428             e.getMessage()).setUnfinishedMessage(this);
25429       } finally {
25430         this.unknownFields = unknownFields.build();
25431         makeExtensionsImmutable();
25432       }
25433     }
25434     public static final com.google.protobuf.Descriptors.Descriptor
25435         getDescriptor() {
25436       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
25437     }
25438 
25439     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
25440         internalGetFieldAccessorTable() {
25441       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable
25442           .ensureFieldAccessorsInitialized(
25443               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
25444     }
25445 
25446     public static com.google.protobuf.Parser<Action> PARSER =
25447         new com.google.protobuf.AbstractParser<Action>() {
25448       public Action parsePartialFrom(
25449           com.google.protobuf.CodedInputStream input,
25450           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25451           throws com.google.protobuf.InvalidProtocolBufferException {
25452         return new Action(input, extensionRegistry);
25453       }
25454     };
25455 
25456     @java.lang.Override
25457     public com.google.protobuf.Parser<Action> getParserForType() {
25458       return PARSER;
25459     }
25460 
25461     private int bitField0_;
25462     // optional uint32 index = 1;
25463     public static final int INDEX_FIELD_NUMBER = 1;
25464     private int index_;
25465     /**
25466      * <code>optional uint32 index = 1;</code>
25467      *
25468      * <pre>
25469      * If part of a multi action, useful for aligning the
25470      * result with what was originally submitted.
25471      * </pre>
25472      */
25473     public boolean hasIndex() {
25474       return ((bitField0_ & 0x00000001) == 0x00000001);
25475     }
25476     /**
25477      * <code>optional uint32 index = 1;</code>
25478      *
25479      * <pre>
25480      * If part of a multi action, useful for aligning the
25481      * result with what was originally submitted.
25482      * </pre>
25483      */
25484     public int getIndex() {
25485       return index_;
25486     }
25487 
25488     // optional .hbase.pb.MutationProto mutation = 2;
25489     public static final int MUTATION_FIELD_NUMBER = 2;
25490     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
25491     /**
25492      * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
25493      */
25494     public boolean hasMutation() {
25495       return ((bitField0_ & 0x00000002) == 0x00000002);
25496     }
25497     /**
25498      * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
25499      */
25500     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
25501       return mutation_;
25502     }
25503     /**
25504      * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
25505      */
25506     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
25507       return mutation_;
25508     }
25509 
25510     // optional .hbase.pb.Get get = 3;
25511     public static final int GET_FIELD_NUMBER = 3;
25512     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
25513     /**
25514      * <code>optional .hbase.pb.Get get = 3;</code>
25515      */
25516     public boolean hasGet() {
25517       return ((bitField0_ & 0x00000004) == 0x00000004);
25518     }
25519     /**
25520      * <code>optional .hbase.pb.Get get = 3;</code>
25521      */
25522     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
25523       return get_;
25524     }
25525     /**
25526      * <code>optional .hbase.pb.Get get = 3;</code>
25527      */
25528     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
25529       return get_;
25530     }
25531 
25532     // optional .hbase.pb.CoprocessorServiceCall service_call = 4;
25533     public static final int SERVICE_CALL_FIELD_NUMBER = 4;
25534     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_;
25535     /**
25536      * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
25537      */
25538     public boolean hasServiceCall() {
25539       return ((bitField0_ & 0x00000008) == 0x00000008);
25540     }
25541     /**
25542      * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
25543      */
25544     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
25545       return serviceCall_;
25546     }
25547     /**
25548      * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
25549      */
25550     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
25551       return serviceCall_;
25552     }
25553 
25554     private void initFields() {
25555       index_ = 0;
25556       mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
25557       get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
25558       serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
25559     }
25560     private byte memoizedIsInitialized = -1;
25561     public final boolean isInitialized() {
25562       byte isInitialized = memoizedIsInitialized;
25563       if (isInitialized != -1) return isInitialized == 1;
25564 
25565       if (hasMutation()) {
25566         if (!getMutation().isInitialized()) {
25567           memoizedIsInitialized = 0;
25568           return false;
25569         }
25570       }
25571       if (hasGet()) {
25572         if (!getGet().isInitialized()) {
25573           memoizedIsInitialized = 0;
25574           return false;
25575         }
25576       }
25577       if (hasServiceCall()) {
25578         if (!getServiceCall().isInitialized()) {
25579           memoizedIsInitialized = 0;
25580           return false;
25581         }
25582       }
25583       memoizedIsInitialized = 1;
25584       return true;
25585     }
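    /*
     * Action declares no required fields of its own, so isInitialized() above only fails when
     * an attached Mutation, Get, or service call is itself missing a required nested field.
     * A minimal pre-send check sketch, assuming `action` is a built or partially built Action
     * (placeholder name):
     *
     *   if (!action.isInitialized()) {
     *     // reject before putting it on the wire
     *   }
     */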
25586 
25587     public void writeTo(com.google.protobuf.CodedOutputStream output)
25588                         throws java.io.IOException {
25589       getSerializedSize();
25590       if (((bitField0_ & 0x00000001) == 0x00000001)) {
25591         output.writeUInt32(1, index_);
25592       }
25593       if (((bitField0_ & 0x00000002) == 0x00000002)) {
25594         output.writeMessage(2, mutation_);
25595       }
25596       if (((bitField0_ & 0x00000004) == 0x00000004)) {
25597         output.writeMessage(3, get_);
25598       }
25599       if (((bitField0_ & 0x00000008) == 0x00000008)) {
25600         output.writeMessage(4, serviceCall_);
25601       }
25602       getUnknownFields().writeTo(output);
25603     }
25604 
25605     private int memoizedSerializedSize = -1;
25606     public int getSerializedSize() {
25607       int size = memoizedSerializedSize;
25608       if (size != -1) return size;
25609 
25610       size = 0;
25611       if (((bitField0_ & 0x00000001) == 0x00000001)) {
25612         size += com.google.protobuf.CodedOutputStream
25613           .computeUInt32Size(1, index_);
25614       }
25615       if (((bitField0_ & 0x00000002) == 0x00000002)) {
25616         size += com.google.protobuf.CodedOutputStream
25617           .computeMessageSize(2, mutation_);
25618       }
25619       if (((bitField0_ & 0x00000004) == 0x00000004)) {
25620         size += com.google.protobuf.CodedOutputStream
25621           .computeMessageSize(3, get_);
25622       }
25623       if (((bitField0_ & 0x00000008) == 0x00000008)) {
25624         size += com.google.protobuf.CodedOutputStream
25625           .computeMessageSize(4, serviceCall_);
25626       }
25627       size += getUnknownFields().getSerializedSize();
25628       memoizedSerializedSize = size;
25629       return size;
25630     }
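    /*
     * getSerializedSize() memoizes its result, and writeTo() expects it to have been computed
     * first (hence the getSerializedSize() call at the top of writeTo). A round-trip sketch,
     * assuming `action` is an initialized Action (placeholder name):
     *
     *   byte[] wire = action.toByteArray();                  // sizes, then writes
     *   ClientProtos.Action copy = ClientProtos.Action.parseFrom(wire);
     */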
25631 
25632     private static final long serialVersionUID = 0L;
25633     @java.lang.Override
25634     protected java.lang.Object writeReplace()
25635         throws java.io.ObjectStreamException {
25636       return super.writeReplace();
25637     }
25638 
25639     @java.lang.Override
25640     public boolean equals(final java.lang.Object obj) {
25641       if (obj == this) {
25642        return true;
25643       }
25644       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)) {
25645         return super.equals(obj);
25646       }
25647       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) obj;
25648 
25649       boolean result = true;
25650       result = result && (hasIndex() == other.hasIndex());
25651       if (hasIndex()) {
25652         result = result && (getIndex()
25653             == other.getIndex());
25654       }
25655       result = result && (hasMutation() == other.hasMutation());
25656       if (hasMutation()) {
25657         result = result && getMutation()
25658             .equals(other.getMutation());
25659       }
25660       result = result && (hasGet() == other.hasGet());
25661       if (hasGet()) {
25662         result = result && getGet()
25663             .equals(other.getGet());
25664       }
25665       result = result && (hasServiceCall() == other.hasServiceCall());
25666       if (hasServiceCall()) {
25667         result = result && getServiceCall()
25668             .equals(other.getServiceCall());
25669       }
25670       result = result &&
25671           getUnknownFields().equals(other.getUnknownFields());
25672       return result;
25673     }
25674 
25675     private int memoizedHashCode = 0;
25676     @java.lang.Override
25677     public int hashCode() {
25678       if (memoizedHashCode != 0) {
25679         return memoizedHashCode;
25680       }
25681       int hash = 41;
25682       hash = (19 * hash) + getDescriptorForType().hashCode();
25683       if (hasIndex()) {
25684         hash = (37 * hash) + INDEX_FIELD_NUMBER;
25685         hash = (53 * hash) + getIndex();
25686       }
25687       if (hasMutation()) {
25688         hash = (37 * hash) + MUTATION_FIELD_NUMBER;
25689         hash = (53 * hash) + getMutation().hashCode();
25690       }
25691       if (hasGet()) {
25692         hash = (37 * hash) + GET_FIELD_NUMBER;
25693         hash = (53 * hash) + getGet().hashCode();
25694       }
25695       if (hasServiceCall()) {
25696         hash = (37 * hash) + SERVICE_CALL_FIELD_NUMBER;
25697         hash = (53 * hash) + getServiceCall().hashCode();
25698       }
25699       hash = (29 * hash) + getUnknownFields().hashCode();
25700       memoizedHashCode = hash;
25701       return hash;
25702     }
25703 
25704     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
25705         com.google.protobuf.ByteString data)
25706         throws com.google.protobuf.InvalidProtocolBufferException {
25707       return PARSER.parseFrom(data);
25708     }
25709     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
25710         com.google.protobuf.ByteString data,
25711         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25712         throws com.google.protobuf.InvalidProtocolBufferException {
25713       return PARSER.parseFrom(data, extensionRegistry);
25714     }
25715     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(byte[] data)
25716         throws com.google.protobuf.InvalidProtocolBufferException {
25717       return PARSER.parseFrom(data);
25718     }
25719     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
25720         byte[] data,
25721         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25722         throws com.google.protobuf.InvalidProtocolBufferException {
25723       return PARSER.parseFrom(data, extensionRegistry);
25724     }
25725     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(java.io.InputStream input)
25726         throws java.io.IOException {
25727       return PARSER.parseFrom(input);
25728     }
25729     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
25730         java.io.InputStream input,
25731         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25732         throws java.io.IOException {
25733       return PARSER.parseFrom(input, extensionRegistry);
25734     }
25735     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(java.io.InputStream input)
25736         throws java.io.IOException {
25737       return PARSER.parseDelimitedFrom(input);
25738     }
25739     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(
25740         java.io.InputStream input,
25741         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25742         throws java.io.IOException {
25743       return PARSER.parseDelimitedFrom(input, extensionRegistry);
25744     }
25745     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
25746         com.google.protobuf.CodedInputStream input)
25747         throws java.io.IOException {
25748       return PARSER.parseFrom(input);
25749     }
25750     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
25751         com.google.protobuf.CodedInputStream input,
25752         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25753         throws java.io.IOException {
25754       return PARSER.parseFrom(input, extensionRegistry);
25755     }
25756 
25757     public static Builder newBuilder() { return Builder.create(); }
25758     public Builder newBuilderForType() { return newBuilder(); }
25759     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action prototype) {
25760       return newBuilder().mergeFrom(prototype);
25761     }
25762     public Builder toBuilder() { return newBuilder(this); }
25763 
25764     @java.lang.Override
25765     protected Builder newBuilderForType(
25766         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25767       Builder builder = new Builder(parent);
25768       return builder;
25769     }
25770     /**
25771      * Protobuf type {@code hbase.pb.Action}
25772      *
25773      * <pre>
25774      * Either a Get or a Mutation
25775      * </pre>
25776      */
25777     public static final class Builder extends
25778         com.google.protobuf.GeneratedMessage.Builder<Builder>
25779        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder {
25780       public static final com.google.protobuf.Descriptors.Descriptor
25781           getDescriptor() {
25782         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
25783       }
25784 
25785       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
25786           internalGetFieldAccessorTable() {
25787         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable
25788             .ensureFieldAccessorsInitialized(
25789                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
25790       }
25791 
25792       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.newBuilder()
25793       private Builder() {
25794         maybeForceBuilderInitialization();
25795       }
25796 
25797       private Builder(
25798           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25799         super(parent);
25800         maybeForceBuilderInitialization();
25801       }
25802       private void maybeForceBuilderInitialization() {
25803         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
25804           getMutationFieldBuilder();
25805           getGetFieldBuilder();
25806           getServiceCallFieldBuilder();
25807         }
25808       }
25809       private static Builder create() {
25810         return new Builder();
25811       }
25812 
25813       public Builder clear() {
25814         super.clear();
25815         index_ = 0;
25816         bitField0_ = (bitField0_ & ~0x00000001);
25817         if (mutationBuilder_ == null) {
25818           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
25819         } else {
25820           mutationBuilder_.clear();
25821         }
25822         bitField0_ = (bitField0_ & ~0x00000002);
25823         if (getBuilder_ == null) {
25824           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
25825         } else {
25826           getBuilder_.clear();
25827         }
25828         bitField0_ = (bitField0_ & ~0x00000004);
25829         if (serviceCallBuilder_ == null) {
25830           serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
25831         } else {
25832           serviceCallBuilder_.clear();
25833         }
25834         bitField0_ = (bitField0_ & ~0x00000008);
25835         return this;
25836       }
25837 
25838       public Builder clone() {
25839         return create().mergeFrom(buildPartial());
25840       }
25841 
25842       public com.google.protobuf.Descriptors.Descriptor
25843           getDescriptorForType() {
25844         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
25845       }
25846 
25847       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() {
25848         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance();
25849       }
25850 
25851       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action build() {
25852         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = buildPartial();
25853         if (!result.isInitialized()) {
25854           throw newUninitializedMessageException(result);
25855         }
25856         return result;
25857       }
25858 
25859       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action buildPartial() {
25860         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action(this);
25861         int from_bitField0_ = bitField0_;
25862         int to_bitField0_ = 0;
25863         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
25864           to_bitField0_ |= 0x00000001;
25865         }
25866         result.index_ = index_;
25867         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
25868           to_bitField0_ |= 0x00000002;
25869         }
25870         if (mutationBuilder_ == null) {
25871           result.mutation_ = mutation_;
25872         } else {
25873           result.mutation_ = mutationBuilder_.build();
25874         }
25875         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
25876           to_bitField0_ |= 0x00000004;
25877         }
25878         if (getBuilder_ == null) {
25879           result.get_ = get_;
25880         } else {
25881           result.get_ = getBuilder_.build();
25882         }
25883         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
25884           to_bitField0_ |= 0x00000008;
25885         }
25886         if (serviceCallBuilder_ == null) {
25887           result.serviceCall_ = serviceCall_;
25888         } else {
25889           result.serviceCall_ = serviceCallBuilder_.build();
25890         }
25891         result.bitField0_ = to_bitField0_;
25892         onBuilt();
25893         return result;
25894       }
25895 
25896       public Builder mergeFrom(com.google.protobuf.Message other) {
25897         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) {
25898           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)other);
25899         } else {
25900           super.mergeFrom(other);
25901           return this;
25902         }
25903       }
25904 
25905       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other) {
25906         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance()) return this;
25907         if (other.hasIndex()) {
25908           setIndex(other.getIndex());
25909         }
25910         if (other.hasMutation()) {
25911           mergeMutation(other.getMutation());
25912         }
25913         if (other.hasGet()) {
25914           mergeGet(other.getGet());
25915         }
25916         if (other.hasServiceCall()) {
25917           mergeServiceCall(other.getServiceCall());
25918         }
25919         this.mergeUnknownFields(other.getUnknownFields());
25920         return this;
25921       }
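      /*
       * In the mergeFrom above, the scalar index is simply overwritten when the other Action
       * has one set, while the message fields go through mergeMutation/mergeGet/mergeServiceCall,
       * which fold the incoming message into any value already present. A minimal sketch,
       * assuming `base` and `overlay` are existing Action instances (placeholder names):
       *
       *   ClientProtos.Action merged = ClientProtos.Action.newBuilder(base)
       *       .mergeFrom(overlay)
       *       .buildPartial();
       */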
25922 
25923       public final boolean isInitialized() {
25924         if (hasMutation()) {
25925           if (!getMutation().isInitialized()) {
25926             
25927             return false;
25928           }
25929         }
25930         if (hasGet()) {
25931           if (!getGet().isInitialized()) {
25932             
25933             return false;
25934           }
25935         }
25936         if (hasServiceCall()) {
25937           if (!getServiceCall().isInitialized()) {
25938             
25939             return false;
25940           }
25941         }
25942         return true;
25943       }
25944 
25945       public Builder mergeFrom(
25946           com.google.protobuf.CodedInputStream input,
25947           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25948           throws java.io.IOException {
25949         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parsedMessage = null;
25950         try {
25951           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
25952         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25953           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) e.getUnfinishedMessage();
25954           throw e;
25955         } finally {
25956           if (parsedMessage != null) {
25957             mergeFrom(parsedMessage);
25958           }
25959         }
25960         return this;
25961       }
25962       private int bitField0_;
25963 
25964       // optional uint32 index = 1;
25965       private int index_ ;
25966       /**
25967        * <code>optional uint32 index = 1;</code>
25968        *
25969        * <pre>
25970        * If part of a multi action, useful for aligning the
25971        * result with what was originally submitted.
25972        * </pre>
25973        */
25974       public boolean hasIndex() {
25975         return ((bitField0_ & 0x00000001) == 0x00000001);
25976       }
25977       /**
25978        * <code>optional uint32 index = 1;</code>
25979        *
25980        * <pre>
25981        * If part of a multi action, useful for aligning the
25982        * result with what was originally submitted.
25983        * </pre>
25984        */
25985       public int getIndex() {
25986         return index_;
25987       }
25988       /**
25989        * <code>optional uint32 index = 1;</code>
25990        *
25991        * <pre>
25992        * If part of a multi action, useful for aligning the
25993        * result with what was originally submitted.
25994        * </pre>
25995        */
25996       public Builder setIndex(int value) {
25997         bitField0_ |= 0x00000001;
25998         index_ = value;
25999         onChanged();
26000         return this;
26001       }
26002       /**
26003        * <code>optional uint32 index = 1;</code>
26004        *
26005        * <pre>
26006        * If part of a multi action, useful for aligning the
26007        * result with what was originally submitted.
26008        * </pre>
26009        */
26010       public Builder clearIndex() {
26011         bitField0_ = (bitField0_ & ~0x00000001);
26012         index_ = 0;
26013         onChanged();
26014         return this;
26015       }
26016 
26017       // optional .hbase.pb.MutationProto mutation = 2;
26018       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
26019       private com.google.protobuf.SingleFieldBuilder<
26020           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
26021       /**
26022        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26023        */
26024       public boolean hasMutation() {
26025         return ((bitField0_ & 0x00000002) == 0x00000002);
26026       }
26027       /**
26028        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26029        */
26030       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
26031         if (mutationBuilder_ == null) {
26032           return mutation_;
26033         } else {
26034           return mutationBuilder_.getMessage();
26035         }
26036       }
26037       /**
26038        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26039        */
26040       public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
26041         if (mutationBuilder_ == null) {
26042           if (value == null) {
26043             throw new NullPointerException();
26044           }
26045           mutation_ = value;
26046           onChanged();
26047         } else {
26048           mutationBuilder_.setMessage(value);
26049         }
26050         bitField0_ |= 0x00000002;
26051         return this;
26052       }
26053       /**
26054        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26055        */
26056       public Builder setMutation(
26057           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
26058         if (mutationBuilder_ == null) {
26059           mutation_ = builderForValue.build();
26060           onChanged();
26061         } else {
26062           mutationBuilder_.setMessage(builderForValue.build());
26063         }
26064         bitField0_ |= 0x00000002;
26065         return this;
26066       }
26067       /**
26068        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26069        */
26070       public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
26071         if (mutationBuilder_ == null) {
26072           if (((bitField0_ & 0x00000002) == 0x00000002) &&
26073               mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
26074             mutation_ =
26075               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
26076           } else {
26077             mutation_ = value;
26078           }
26079           onChanged();
26080         } else {
26081           mutationBuilder_.mergeFrom(value);
26082         }
26083         bitField0_ |= 0x00000002;
26084         return this;
26085       }
26086       /**
26087        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26088        */
26089       public Builder clearMutation() {
26090         if (mutationBuilder_ == null) {
26091           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
26092           onChanged();
26093         } else {
26094           mutationBuilder_.clear();
26095         }
26096         bitField0_ = (bitField0_ & ~0x00000002);
26097         return this;
26098       }
26099       /**
26100        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26101        */
26102       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
26103         bitField0_ |= 0x00000002;
26104         onChanged();
26105         return getMutationFieldBuilder().getBuilder();
26106       }
26107       /**
26108        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26109        */
26110       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
26111         if (mutationBuilder_ != null) {
26112           return mutationBuilder_.getMessageOrBuilder();
26113         } else {
26114           return mutation_;
26115         }
26116       }
26117       /**
26118        * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
26119        */
26120       private com.google.protobuf.SingleFieldBuilder<
26121           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> 
26122           getMutationFieldBuilder() {
26123         if (mutationBuilder_ == null) {
26124           mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
26125               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
26126                   mutation_,
26127                   getParentForChildren(),
26128                   isClean());
26129           mutation_ = null;
26130         }
26131         return mutationBuilder_;
26132       }
26133 
26134       // optional .hbase.pb.Get get = 3;
26135       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
26136       private com.google.protobuf.SingleFieldBuilder<
26137           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
26138       /**
26139        * <code>optional .hbase.pb.Get get = 3;</code>
26140        */
26141       public boolean hasGet() {
26142         return ((bitField0_ & 0x00000004) == 0x00000004);
26143       }
26144       /**
26145        * <code>optional .hbase.pb.Get get = 3;</code>
26146        */
26147       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
26148         if (getBuilder_ == null) {
26149           return get_;
26150         } else {
26151           return getBuilder_.getMessage();
26152         }
26153       }
26154       /**
26155        * <code>optional .hbase.pb.Get get = 3;</code>
26156        */
26157       public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
26158         if (getBuilder_ == null) {
26159           if (value == null) {
26160             throw new NullPointerException();
26161           }
26162           get_ = value;
26163           onChanged();
26164         } else {
26165           getBuilder_.setMessage(value);
26166         }
26167         bitField0_ |= 0x00000004;
26168         return this;
26169       }
26170       /**
26171        * <code>optional .hbase.pb.Get get = 3;</code>
26172        */
26173       public Builder setGet(
26174           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
26175         if (getBuilder_ == null) {
26176           get_ = builderForValue.build();
26177           onChanged();
26178         } else {
26179           getBuilder_.setMessage(builderForValue.build());
26180         }
26181         bitField0_ |= 0x00000004;
26182         return this;
26183       }
26184       /**
26185        * <code>optional .hbase.pb.Get get = 3;</code>
26186        */
26187       public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
26188         if (getBuilder_ == null) {
26189           if (((bitField0_ & 0x00000004) == 0x00000004) &&
26190               get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
26191             get_ =
26192               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
26193           } else {
26194             get_ = value;
26195           }
26196           onChanged();
26197         } else {
26198           getBuilder_.mergeFrom(value);
26199         }
26200         bitField0_ |= 0x00000004;
26201         return this;
26202       }
26203       /**
26204        * <code>optional .hbase.pb.Get get = 3;</code>
26205        */
26206       public Builder clearGet() {
26207         if (getBuilder_ == null) {
26208           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
26209           onChanged();
26210         } else {
26211           getBuilder_.clear();
26212         }
26213         bitField0_ = (bitField0_ & ~0x00000004);
26214         return this;
26215       }
26216       /**
26217        * <code>optional .hbase.pb.Get get = 3;</code>
26218        */
26219       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
26220         bitField0_ |= 0x00000004;
26221         onChanged();
26222         return getGetFieldBuilder().getBuilder();
26223       }
26224       /**
26225        * <code>optional .hbase.pb.Get get = 3;</code>
26226        */
26227       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
26228         if (getBuilder_ != null) {
26229           return getBuilder_.getMessageOrBuilder();
26230         } else {
26231           return get_;
26232         }
26233       }
26234       /**
26235        * <code>optional .hbase.pb.Get get = 3;</code>
26236        */
26237       private com.google.protobuf.SingleFieldBuilder<
26238           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> 
26239           getGetFieldBuilder() {
26240         if (getBuilder_ == null) {
26241           getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
26242               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
26243                   get_,
26244                   getParentForChildren(),
26245                   isClean());
26246           get_ = null;
26247         }
26248         return getBuilder_;
26249       }
26250 
26251       // optional .hbase.pb.CoprocessorServiceCall service_call = 4;
26252       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
26253       private com.google.protobuf.SingleFieldBuilder<
26254           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> serviceCallBuilder_;
26255       /**
26256        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26257        */
26258       public boolean hasServiceCall() {
26259         return ((bitField0_ & 0x00000008) == 0x00000008);
26260       }
26261       /**
26262        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26263        */
26264       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
26265         if (serviceCallBuilder_ == null) {
26266           return serviceCall_;
26267         } else {
26268           return serviceCallBuilder_.getMessage();
26269         }
26270       }
26271       /**
26272        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26273        */
26274       public Builder setServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
26275         if (serviceCallBuilder_ == null) {
26276           if (value == null) {
26277             throw new NullPointerException();
26278           }
26279           serviceCall_ = value;
26280           onChanged();
26281         } else {
26282           serviceCallBuilder_.setMessage(value);
26283         }
26284         bitField0_ |= 0x00000008;
26285         return this;
26286       }
26287       /**
26288        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26289        */
26290       public Builder setServiceCall(
26291           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
26292         if (serviceCallBuilder_ == null) {
26293           serviceCall_ = builderForValue.build();
26294           onChanged();
26295         } else {
26296           serviceCallBuilder_.setMessage(builderForValue.build());
26297         }
26298         bitField0_ |= 0x00000008;
26299         return this;
26300       }
26301       /**
26302        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26303        */
26304       public Builder mergeServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
26305         if (serviceCallBuilder_ == null) {
26306           if (((bitField0_ & 0x00000008) == 0x00000008) &&
26307               serviceCall_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
26308             serviceCall_ =
26309               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(serviceCall_).mergeFrom(value).buildPartial();
26310           } else {
26311             serviceCall_ = value;
26312           }
26313           onChanged();
26314         } else {
26315           serviceCallBuilder_.mergeFrom(value);
26316         }
26317         bitField0_ |= 0x00000008;
26318         return this;
26319       }
26320       /**
26321        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26322        */
26323       public Builder clearServiceCall() {
26324         if (serviceCallBuilder_ == null) {
26325           serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
26326           onChanged();
26327         } else {
26328           serviceCallBuilder_.clear();
26329         }
26330         bitField0_ = (bitField0_ & ~0x00000008);
26331         return this;
26332       }
26333       /**
26334        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26335        */
26336       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getServiceCallBuilder() {
26337         bitField0_ |= 0x00000008;
26338         onChanged();
26339         return getServiceCallFieldBuilder().getBuilder();
26340       }
26341       /**
26342        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26343        */
26344       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
26345         if (serviceCallBuilder_ != null) {
26346           return serviceCallBuilder_.getMessageOrBuilder();
26347         } else {
26348           return serviceCall_;
26349         }
26350       }
26351       /**
26352        * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
26353        */
26354       private com.google.protobuf.SingleFieldBuilder<
26355           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> 
26356           getServiceCallFieldBuilder() {
26357         if (serviceCallBuilder_ == null) {
26358           serviceCallBuilder_ = new com.google.protobuf.SingleFieldBuilder<
26359               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
26360                   serviceCall_,
26361                   getParentForChildren(),
26362                   isClean());
26363           serviceCall_ = null;
26364         }
26365         return serviceCallBuilder_;
26366       }
26367 
26368       // @@protoc_insertion_point(builder_scope:hbase.pb.Action)
26369     }
26370 
26371     static {
26372       defaultInstance = new Action(true);
26373       defaultInstance.initFields();
26374     }
26375 
26376     // @@protoc_insertion_point(class_scope:hbase.pb.Action)
26377   }
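  // Editor's sketch, not protoc output: a minimal illustration of the Action
  // builder API defined above. It assumes the caller supplies an already-built
  // CoprocessorServiceCall; only accessors visible in the generated Action
  // class are used.
  private static Action newActionForServiceCall(CoprocessorServiceCall call) {
    // setServiceCall() stores the message and flips the has-bit (0x00000008),
    // so hasServiceCall() is true on the built message.
    return Action.newBuilder()
        .setServiceCall(call)
        .build();
  }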
26378 
26379   public interface RegionActionOrBuilder
26380       extends com.google.protobuf.MessageOrBuilder {
26381 
26382     // required .hbase.pb.RegionSpecifier region = 1;
26383     /**
26384      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
26385      */
26386     boolean hasRegion();
26387     /**
26388      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
26389      */
26390     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
26391     /**
26392      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
26393      */
26394     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
26395 
26396     // optional bool atomic = 2;
26397     /**
26398      * <code>optional bool atomic = 2;</code>
26399      *
26400      * <pre>
26401      * When set, run mutations as atomic unit.
26402      * </pre>
26403      */
26404     boolean hasAtomic();
26405     /**
26406      * <code>optional bool atomic = 2;</code>
26407      *
26408      * <pre>
26409      * When set, run mutations as atomic unit.
26410      * </pre>
26411      */
26412     boolean getAtomic();
26413 
26414     // repeated .hbase.pb.Action action = 3;
26415     /**
26416      * <code>repeated .hbase.pb.Action action = 3;</code>
26417      */
26418     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> 
26419         getActionList();
26420     /**
26421      * <code>repeated .hbase.pb.Action action = 3;</code>
26422      */
26423     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index);
26424     /**
26425      * <code>repeated .hbase.pb.Action action = 3;</code>
26426      */
26427     int getActionCount();
26428     /**
26429      * <code>repeated .hbase.pb.Action action = 3;</code>
26430      */
26431     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
26432         getActionOrBuilderList();
26433     /**
26434      * <code>repeated .hbase.pb.Action action = 3;</code>
26435      */
26436     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
26437         int index);
26438   }
26439   /**
26440    * Protobuf type {@code hbase.pb.RegionAction}
26441    *
26442    * <pre>
26443    **
26444    * Actions to run against a Region.
26445    * </pre>
26446    */
26447   public static final class RegionAction extends
26448       com.google.protobuf.GeneratedMessage
26449       implements RegionActionOrBuilder {
26450     // Use RegionAction.newBuilder() to construct.
26451     private RegionAction(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
26452       super(builder);
26453       this.unknownFields = builder.getUnknownFields();
26454     }
26455     private RegionAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
26456 
26457     private static final RegionAction defaultInstance;
26458     public static RegionAction getDefaultInstance() {
26459       return defaultInstance;
26460     }
26461 
26462     public RegionAction getDefaultInstanceForType() {
26463       return defaultInstance;
26464     }
26465 
26466     private final com.google.protobuf.UnknownFieldSet unknownFields;
26467     @java.lang.Override
26468     public final com.google.protobuf.UnknownFieldSet
26469         getUnknownFields() {
26470       return this.unknownFields;
26471     }
26472     private RegionAction(
26473         com.google.protobuf.CodedInputStream input,
26474         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26475         throws com.google.protobuf.InvalidProtocolBufferException {
26476       initFields();
26477       int mutable_bitField0_ = 0;
26478       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
26479           com.google.protobuf.UnknownFieldSet.newBuilder();
26480       try {
26481         boolean done = false;
26482         while (!done) {
26483           int tag = input.readTag();
26484           switch (tag) {
26485             case 0:
26486               done = true;
26487               break;
26488             default: {
26489               if (!parseUnknownField(input, unknownFields,
26490                                      extensionRegistry, tag)) {
26491                 done = true;
26492               }
26493               break;
26494             }
26495             case 10: {
26496               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
26497               if (((bitField0_ & 0x00000001) == 0x00000001)) {
26498                 subBuilder = region_.toBuilder();
26499               }
26500               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
26501               if (subBuilder != null) {
26502                 subBuilder.mergeFrom(region_);
26503                 region_ = subBuilder.buildPartial();
26504               }
26505               bitField0_ |= 0x00000001;
26506               break;
26507             }
26508             case 16: {
26509               bitField0_ |= 0x00000002;
26510               atomic_ = input.readBool();
26511               break;
26512             }
26513             case 26: {
26514               if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
26515                 action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>();
26516                 mutable_bitField0_ |= 0x00000004;
26517               }
26518               action_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry));
26519               break;
26520             }
26521           }
26522         }
26523       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
26524         throw e.setUnfinishedMessage(this);
26525       } catch (java.io.IOException e) {
26526         throw new com.google.protobuf.InvalidProtocolBufferException(
26527             e.getMessage()).setUnfinishedMessage(this);
26528       } finally {
26529         if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
26530           action_ = java.util.Collections.unmodifiableList(action_);
26531         }
26532         this.unknownFields = unknownFields.build();
26533         makeExtensionsImmutable();
26534       }
26535     }
26536     public static final com.google.protobuf.Descriptors.Descriptor
26537         getDescriptor() {
26538       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
26539     }
26540 
26541     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
26542         internalGetFieldAccessorTable() {
26543       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable
26544           .ensureFieldAccessorsInitialized(
26545               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
26546     }
26547 
26548     public static com.google.protobuf.Parser<RegionAction> PARSER =
26549         new com.google.protobuf.AbstractParser<RegionAction>() {
26550       public RegionAction parsePartialFrom(
26551           com.google.protobuf.CodedInputStream input,
26552           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26553           throws com.google.protobuf.InvalidProtocolBufferException {
26554         return new RegionAction(input, extensionRegistry);
26555       }
26556     };
26557 
26558     @java.lang.Override
26559     public com.google.protobuf.Parser<RegionAction> getParserForType() {
26560       return PARSER;
26561     }
26562 
26563     private int bitField0_;
26564     // required .hbase.pb.RegionSpecifier region = 1;
26565     public static final int REGION_FIELD_NUMBER = 1;
26566     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
26567     /**
26568      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
26569      */
26570     public boolean hasRegion() {
26571       return ((bitField0_ & 0x00000001) == 0x00000001);
26572     }
26573     /**
26574      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
26575      */
26576     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
26577       return region_;
26578     }
26579     /**
26580      * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
26581      */
26582     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
26583       return region_;
26584     }
26585 
26586     // optional bool atomic = 2;
26587     public static final int ATOMIC_FIELD_NUMBER = 2;
26588     private boolean atomic_;
26589     /**
26590      * <code>optional bool atomic = 2;</code>
26591      *
26592      * <pre>
26593      * When set, run mutations as atomic unit.
26594      * </pre>
26595      */
26596     public boolean hasAtomic() {
26597       return ((bitField0_ & 0x00000002) == 0x00000002);
26598     }
26599     /**
26600      * <code>optional bool atomic = 2;</code>
26601      *
26602      * <pre>
26603      * When set, run mutations as atomic unit.
26604      * </pre>
26605      */
26606     public boolean getAtomic() {
26607       return atomic_;
26608     }
26609 
26610     // repeated .hbase.pb.Action action = 3;
26611     public static final int ACTION_FIELD_NUMBER = 3;
26612     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_;
26613     /**
26614      * <code>repeated .hbase.pb.Action action = 3;</code>
26615      */
26616     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
26617       return action_;
26618     }
26619     /**
26620      * <code>repeated .hbase.pb.Action action = 3;</code>
26621      */
26622     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
26623         getActionOrBuilderList() {
26624       return action_;
26625     }
26626     /**
26627      * <code>repeated .hbase.pb.Action action = 3;</code>
26628      */
26629     public int getActionCount() {
26630       return action_.size();
26631     }
26632     /**
26633      * <code>repeated .hbase.pb.Action action = 3;</code>
26634      */
26635     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
26636       return action_.get(index);
26637     }
26638     /**
26639      * <code>repeated .hbase.pb.Action action = 3;</code>
26640      */
26641     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
26642         int index) {
26643       return action_.get(index);
26644     }
26645 
26646     private void initFields() {
26647       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
26648       atomic_ = false;
26649       action_ = java.util.Collections.emptyList();
26650     }
26651     private byte memoizedIsInitialized = -1;
26652     public final boolean isInitialized() {
26653       byte isInitialized = memoizedIsInitialized;
26654       if (isInitialized != -1) return isInitialized == 1;
26655 
26656       if (!hasRegion()) {
26657         memoizedIsInitialized = 0;
26658         return false;
26659       }
26660       if (!getRegion().isInitialized()) {
26661         memoizedIsInitialized = 0;
26662         return false;
26663       }
26664       for (int i = 0; i < getActionCount(); i++) {
26665         if (!getAction(i).isInitialized()) {
26666           memoizedIsInitialized = 0;
26667           return false;
26668         }
26669       }
26670       memoizedIsInitialized = 1;
26671       return true;
26672     }
26673 
26674     public void writeTo(com.google.protobuf.CodedOutputStream output)
26675                         throws java.io.IOException {
26676       getSerializedSize();
26677       if (((bitField0_ & 0x00000001) == 0x00000001)) {
26678         output.writeMessage(1, region_);
26679       }
26680       if (((bitField0_ & 0x00000002) == 0x00000002)) {
26681         output.writeBool(2, atomic_);
26682       }
26683       for (int i = 0; i < action_.size(); i++) {
26684         output.writeMessage(3, action_.get(i));
26685       }
26686       getUnknownFields().writeTo(output);
26687     }
26688 
26689     private int memoizedSerializedSize = -1;
26690     public int getSerializedSize() {
26691       int size = memoizedSerializedSize;
26692       if (size != -1) return size;
26693 
26694       size = 0;
26695       if (((bitField0_ & 0x00000001) == 0x00000001)) {
26696         size += com.google.protobuf.CodedOutputStream
26697           .computeMessageSize(1, region_);
26698       }
26699       if (((bitField0_ & 0x00000002) == 0x00000002)) {
26700         size += com.google.protobuf.CodedOutputStream
26701           .computeBoolSize(2, atomic_);
26702       }
26703       for (int i = 0; i < action_.size(); i++) {
26704         size += com.google.protobuf.CodedOutputStream
26705           .computeMessageSize(3, action_.get(i));
26706       }
26707       size += getUnknownFields().getSerializedSize();
26708       memoizedSerializedSize = size;
26709       return size;
26710     }
26711 
26712     private static final long serialVersionUID = 0L;
26713     @java.lang.Override
26714     protected java.lang.Object writeReplace()
26715         throws java.io.ObjectStreamException {
26716       return super.writeReplace();
26717     }
26718 
26719     @java.lang.Override
26720     public boolean equals(final java.lang.Object obj) {
26721       if (obj == this) {
26722        return true;
26723       }
26724       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)) {
26725         return super.equals(obj);
26726       }
26727       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) obj;
26728 
26729       boolean result = true;
26730       result = result && (hasRegion() == other.hasRegion());
26731       if (hasRegion()) {
26732         result = result && getRegion()
26733             .equals(other.getRegion());
26734       }
26735       result = result && (hasAtomic() == other.hasAtomic());
26736       if (hasAtomic()) {
26737         result = result && (getAtomic()
26738             == other.getAtomic());
26739       }
26740       result = result && getActionList()
26741           .equals(other.getActionList());
26742       result = result &&
26743           getUnknownFields().equals(other.getUnknownFields());
26744       return result;
26745     }
26746 
26747     private int memoizedHashCode = 0;
26748     @java.lang.Override
26749     public int hashCode() {
26750       if (memoizedHashCode != 0) {
26751         return memoizedHashCode;
26752       }
26753       int hash = 41;
26754       hash = (19 * hash) + getDescriptorForType().hashCode();
26755       if (hasRegion()) {
26756         hash = (37 * hash) + REGION_FIELD_NUMBER;
26757         hash = (53 * hash) + getRegion().hashCode();
26758       }
26759       if (hasAtomic()) {
26760         hash = (37 * hash) + ATOMIC_FIELD_NUMBER;
26761         hash = (53 * hash) + hashBoolean(getAtomic());
26762       }
26763       if (getActionCount() > 0) {
26764         hash = (37 * hash) + ACTION_FIELD_NUMBER;
26765         hash = (53 * hash) + getActionList().hashCode();
26766       }
26767       hash = (29 * hash) + getUnknownFields().hashCode();
26768       memoizedHashCode = hash;
26769       return hash;
26770     }
26771 
26772     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
26773         com.google.protobuf.ByteString data)
26774         throws com.google.protobuf.InvalidProtocolBufferException {
26775       return PARSER.parseFrom(data);
26776     }
26777     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
26778         com.google.protobuf.ByteString data,
26779         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26780         throws com.google.protobuf.InvalidProtocolBufferException {
26781       return PARSER.parseFrom(data, extensionRegistry);
26782     }
26783     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(byte[] data)
26784         throws com.google.protobuf.InvalidProtocolBufferException {
26785       return PARSER.parseFrom(data);
26786     }
26787     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
26788         byte[] data,
26789         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26790         throws com.google.protobuf.InvalidProtocolBufferException {
26791       return PARSER.parseFrom(data, extensionRegistry);
26792     }
26793     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(java.io.InputStream input)
26794         throws java.io.IOException {
26795       return PARSER.parseFrom(input);
26796     }
26797     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
26798         java.io.InputStream input,
26799         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26800         throws java.io.IOException {
26801       return PARSER.parseFrom(input, extensionRegistry);
26802     }
26803     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(java.io.InputStream input)
26804         throws java.io.IOException {
26805       return PARSER.parseDelimitedFrom(input);
26806     }
26807     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(
26808         java.io.InputStream input,
26809         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26810         throws java.io.IOException {
26811       return PARSER.parseDelimitedFrom(input, extensionRegistry);
26812     }
26813     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
26814         com.google.protobuf.CodedInputStream input)
26815         throws java.io.IOException {
26816       return PARSER.parseFrom(input);
26817     }
26818     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
26819         com.google.protobuf.CodedInputStream input,
26820         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26821         throws java.io.IOException {
26822       return PARSER.parseFrom(input, extensionRegistry);
26823     }
26824 
26825     public static Builder newBuilder() { return Builder.create(); }
26826     public Builder newBuilderForType() { return newBuilder(); }
26827     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction prototype) {
26828       return newBuilder().mergeFrom(prototype);
26829     }
26830     public Builder toBuilder() { return newBuilder(this); }
26831 
26832     @java.lang.Override
26833     protected Builder newBuilderForType(
26834         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
26835       Builder builder = new Builder(parent);
26836       return builder;
26837     }
26838     /**
26839      * Protobuf type {@code hbase.pb.RegionAction}
26840      *
26841      * <pre>
26842      **
26843      * Actions to run against a Region.
26844      * </pre>
26845      */
26846     public static final class Builder extends
26847         com.google.protobuf.GeneratedMessage.Builder<Builder>
26848        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder {
26849       public static final com.google.protobuf.Descriptors.Descriptor
26850           getDescriptor() {
26851         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
26852       }
26853 
26854       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
26855           internalGetFieldAccessorTable() {
26856         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable
26857             .ensureFieldAccessorsInitialized(
26858                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
26859       }
26860 
26861       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.newBuilder()
26862       private Builder() {
26863         maybeForceBuilderInitialization();
26864       }
26865 
26866       private Builder(
26867           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
26868         super(parent);
26869         maybeForceBuilderInitialization();
26870       }
26871       private void maybeForceBuilderInitialization() {
26872         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
26873           getRegionFieldBuilder();
26874           getActionFieldBuilder();
26875         }
26876       }
26877       private static Builder create() {
26878         return new Builder();
26879       }
26880 
26881       public Builder clear() {
26882         super.clear();
26883         if (regionBuilder_ == null) {
26884           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
26885         } else {
26886           regionBuilder_.clear();
26887         }
26888         bitField0_ = (bitField0_ & ~0x00000001);
26889         atomic_ = false;
26890         bitField0_ = (bitField0_ & ~0x00000002);
26891         if (actionBuilder_ == null) {
26892           action_ = java.util.Collections.emptyList();
26893           bitField0_ = (bitField0_ & ~0x00000004);
26894         } else {
26895           actionBuilder_.clear();
26896         }
26897         return this;
26898       }
26899 
26900       public Builder clone() {
26901         return create().mergeFrom(buildPartial());
26902       }
26903 
26904       public com.google.protobuf.Descriptors.Descriptor
26905           getDescriptorForType() {
26906         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
26907       }
26908 
26909       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() {
26910         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance();
26911       }
26912 
26913       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction build() {
26914         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = buildPartial();
26915         if (!result.isInitialized()) {
26916           throw newUninitializedMessageException(result);
26917         }
26918         return result;
26919       }
26920 
26921       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction buildPartial() {
26922         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction(this);
26923         int from_bitField0_ = bitField0_;
26924         int to_bitField0_ = 0;
26925         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
26926           to_bitField0_ |= 0x00000001;
26927         }
26928         if (regionBuilder_ == null) {
26929           result.region_ = region_;
26930         } else {
26931           result.region_ = regionBuilder_.build();
26932         }
26933         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
26934           to_bitField0_ |= 0x00000002;
26935         }
26936         result.atomic_ = atomic_;
26937         if (actionBuilder_ == null) {
26938           if (((bitField0_ & 0x00000004) == 0x00000004)) {
26939             action_ = java.util.Collections.unmodifiableList(action_);
26940             bitField0_ = (bitField0_ & ~0x00000004);
26941           }
26942           result.action_ = action_;
26943         } else {
26944           result.action_ = actionBuilder_.build();
26945         }
26946         result.bitField0_ = to_bitField0_;
26947         onBuilt();
26948         return result;
26949       }
26950 
26951       public Builder mergeFrom(com.google.protobuf.Message other) {
26952         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) {
26953           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)other);
26954         } else {
26955           super.mergeFrom(other);
26956           return this;
26957         }
26958       }
26959 
26960       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other) {
26961         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()) return this;
26962         if (other.hasRegion()) {
26963           mergeRegion(other.getRegion());
26964         }
26965         if (other.hasAtomic()) {
26966           setAtomic(other.getAtomic());
26967         }
26968         if (actionBuilder_ == null) {
26969           if (!other.action_.isEmpty()) {
26970             if (action_.isEmpty()) {
26971               action_ = other.action_;
26972               bitField0_ = (bitField0_ & ~0x00000004);
26973             } else {
26974               ensureActionIsMutable();
26975               action_.addAll(other.action_);
26976             }
26977             onChanged();
26978           }
26979         } else {
26980           if (!other.action_.isEmpty()) {
26981             if (actionBuilder_.isEmpty()) {
26982               actionBuilder_.dispose();
26983               actionBuilder_ = null;
26984               action_ = other.action_;
26985               bitField0_ = (bitField0_ & ~0x00000004);
26986               actionBuilder_ = 
26987                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
26988                    getActionFieldBuilder() : null;
26989             } else {
26990               actionBuilder_.addAllMessages(other.action_);
26991             }
26992           }
26993         }
26994         this.mergeUnknownFields(other.getUnknownFields());
26995         return this;
26996       }
26997 
26998       public final boolean isInitialized() {
26999         if (!hasRegion()) {
27000           
27001           return false;
27002         }
27003         if (!getRegion().isInitialized()) {
27004           
27005           return false;
27006         }
27007         for (int i = 0; i < getActionCount(); i++) {
27008           if (!getAction(i).isInitialized()) {
27009             
27010             return false;
27011           }
27012         }
27013         return true;
27014       }
27015 
27016       public Builder mergeFrom(
27017           com.google.protobuf.CodedInputStream input,
27018           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27019           throws java.io.IOException {
27020         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parsedMessage = null;
27021         try {
27022           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
27023         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
27024           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) e.getUnfinishedMessage();
27025           throw e;
27026         } finally {
27027           if (parsedMessage != null) {
27028             mergeFrom(parsedMessage);
27029           }
27030         }
27031         return this;
27032       }
27033       private int bitField0_;
27034 
27035       // required .hbase.pb.RegionSpecifier region = 1;
27036       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
27037       private com.google.protobuf.SingleFieldBuilder<
27038           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
27039       /**
27040        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27041        */
27042       public boolean hasRegion() {
27043         return ((bitField0_ & 0x00000001) == 0x00000001);
27044       }
27045       /**
27046        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27047        */
27048       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
27049         if (regionBuilder_ == null) {
27050           return region_;
27051         } else {
27052           return regionBuilder_.getMessage();
27053         }
27054       }
27055       /**
27056        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27057        */
27058       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
27059         if (regionBuilder_ == null) {
27060           if (value == null) {
27061             throw new NullPointerException();
27062           }
27063           region_ = value;
27064           onChanged();
27065         } else {
27066           regionBuilder_.setMessage(value);
27067         }
27068         bitField0_ |= 0x00000001;
27069         return this;
27070       }
27071       /**
27072        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27073        */
27074       public Builder setRegion(
27075           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
27076         if (regionBuilder_ == null) {
27077           region_ = builderForValue.build();
27078           onChanged();
27079         } else {
27080           regionBuilder_.setMessage(builderForValue.build());
27081         }
27082         bitField0_ |= 0x00000001;
27083         return this;
27084       }
27085       /**
27086        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27087        */
27088       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
27089         if (regionBuilder_ == null) {
27090           if (((bitField0_ & 0x00000001) == 0x00000001) &&
27091               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
27092             region_ =
27093               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
27094           } else {
27095             region_ = value;
27096           }
27097           onChanged();
27098         } else {
27099           regionBuilder_.mergeFrom(value);
27100         }
27101         bitField0_ |= 0x00000001;
27102         return this;
27103       }
27104       /**
27105        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27106        */
27107       public Builder clearRegion() {
27108         if (regionBuilder_ == null) {
27109           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
27110           onChanged();
27111         } else {
27112           regionBuilder_.clear();
27113         }
27114         bitField0_ = (bitField0_ & ~0x00000001);
27115         return this;
27116       }
27117       /**
27118        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27119        */
27120       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
27121         bitField0_ |= 0x00000001;
27122         onChanged();
27123         return getRegionFieldBuilder().getBuilder();
27124       }
27125       /**
27126        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27127        */
27128       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
27129         if (regionBuilder_ != null) {
27130           return regionBuilder_.getMessageOrBuilder();
27131         } else {
27132           return region_;
27133         }
27134       }
27135       /**
27136        * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
27137        */
27138       private com.google.protobuf.SingleFieldBuilder<
27139           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
27140           getRegionFieldBuilder() {
27141         if (regionBuilder_ == null) {
27142           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
27143               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
27144                   region_,
27145                   getParentForChildren(),
27146                   isClean());
27147           region_ = null;
27148         }
27149         return regionBuilder_;
27150       }
27151 
27152       // optional bool atomic = 2;
27153       private boolean atomic_ ;
27154       /**
27155        * <code>optional bool atomic = 2;</code>
27156        *
27157        * <pre>
27158        * When set, run mutations as atomic unit.
27159        * </pre>
27160        */
27161       public boolean hasAtomic() {
27162         return ((bitField0_ & 0x00000002) == 0x00000002);
27163       }
27164       /**
27165        * <code>optional bool atomic = 2;</code>
27166        *
27167        * <pre>
27168        * When set, run mutations as atomic unit.
27169        * </pre>
27170        */
27171       public boolean getAtomic() {
27172         return atomic_;
27173       }
27174       /**
27175        * <code>optional bool atomic = 2;</code>
27176        *
27177        * <pre>
27178        * When set, run mutations as atomic unit.
27179        * </pre>
27180        */
27181       public Builder setAtomic(boolean value) {
27182         bitField0_ |= 0x00000002;
27183         atomic_ = value;
27184         onChanged();
27185         return this;
27186       }
27187       /**
27188        * <code>optional bool atomic = 2;</code>
27189        *
27190        * <pre>
27191        * When set, run mutations as atomic unit.
27192        * </pre>
27193        */
27194       public Builder clearAtomic() {
27195         bitField0_ = (bitField0_ & ~0x00000002);
27196         atomic_ = false;
27197         onChanged();
27198         return this;
27199       }
27200 
27201       // repeated .hbase.pb.Action action = 3;
27202       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_ =
27203         java.util.Collections.emptyList();
27204       private void ensureActionIsMutable() {
27205         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
27206           action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>(action_);
27207           bitField0_ |= 0x00000004;
27208          }
27209       }
27210 
27211       private com.google.protobuf.RepeatedFieldBuilder<
27212           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> actionBuilder_;
27213 
27214       /**
27215        * <code>repeated .hbase.pb.Action action = 3;</code>
27216        */
27217       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
27218         if (actionBuilder_ == null) {
27219           return java.util.Collections.unmodifiableList(action_);
27220         } else {
27221           return actionBuilder_.getMessageList();
27222         }
27223       }
27224       /**
27225        * <code>repeated .hbase.pb.Action action = 3;</code>
27226        */
27227       public int getActionCount() {
27228         if (actionBuilder_ == null) {
27229           return action_.size();
27230         } else {
27231           return actionBuilder_.getCount();
27232         }
27233       }
27234       /**
27235        * <code>repeated .hbase.pb.Action action = 3;</code>
27236        */
27237       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
27238         if (actionBuilder_ == null) {
27239           return action_.get(index);
27240         } else {
27241           return actionBuilder_.getMessage(index);
27242         }
27243       }
27244       /**
27245        * <code>repeated .hbase.pb.Action action = 3;</code>
27246        */
27247       public Builder setAction(
27248           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
27249         if (actionBuilder_ == null) {
27250           if (value == null) {
27251             throw new NullPointerException();
27252           }
27253           ensureActionIsMutable();
27254           action_.set(index, value);
27255           onChanged();
27256         } else {
27257           actionBuilder_.setMessage(index, value);
27258         }
27259         return this;
27260       }
27261       /**
27262        * <code>repeated .hbase.pb.Action action = 3;</code>
27263        */
27264       public Builder setAction(
27265           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
27266         if (actionBuilder_ == null) {
27267           ensureActionIsMutable();
27268           action_.set(index, builderForValue.build());
27269           onChanged();
27270         } else {
27271           actionBuilder_.setMessage(index, builderForValue.build());
27272         }
27273         return this;
27274       }
27275       /**
27276        * <code>repeated .hbase.pb.Action action = 3;</code>
27277        */
27278       public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
27279         if (actionBuilder_ == null) {
27280           if (value == null) {
27281             throw new NullPointerException();
27282           }
27283           ensureActionIsMutable();
27284           action_.add(value);
27285           onChanged();
27286         } else {
27287           actionBuilder_.addMessage(value);
27288         }
27289         return this;
27290       }
27291       /**
27292        * <code>repeated .hbase.pb.Action action = 3;</code>
27293        */
27294       public Builder addAction(
27295           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
27296         if (actionBuilder_ == null) {
27297           if (value == null) {
27298             throw new NullPointerException();
27299           }
27300           ensureActionIsMutable();
27301           action_.add(index, value);
27302           onChanged();
27303         } else {
27304           actionBuilder_.addMessage(index, value);
27305         }
27306         return this;
27307       }
27308       /**
27309        * <code>repeated .hbase.pb.Action action = 3;</code>
27310        */
27311       public Builder addAction(
27312           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
27313         if (actionBuilder_ == null) {
27314           ensureActionIsMutable();
27315           action_.add(builderForValue.build());
27316           onChanged();
27317         } else {
27318           actionBuilder_.addMessage(builderForValue.build());
27319         }
27320         return this;
27321       }
27322       /**
27323        * <code>repeated .hbase.pb.Action action = 3;</code>
27324        */
27325       public Builder addAction(
27326           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
27327         if (actionBuilder_ == null) {
27328           ensureActionIsMutable();
27329           action_.add(index, builderForValue.build());
27330           onChanged();
27331         } else {
27332           actionBuilder_.addMessage(index, builderForValue.build());
27333         }
27334         return this;
27335       }
27336       /**
27337        * <code>repeated .hbase.pb.Action action = 3;</code>
27338        */
27339       public Builder addAllAction(
27340           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> values) {
27341         if (actionBuilder_ == null) {
27342           ensureActionIsMutable();
27343           super.addAll(values, action_);
27344           onChanged();
27345         } else {
27346           actionBuilder_.addAllMessages(values);
27347         }
27348         return this;
27349       }
27350       /**
27351        * <code>repeated .hbase.pb.Action action = 3;</code>
27352        */
27353       public Builder clearAction() {
27354         if (actionBuilder_ == null) {
27355           action_ = java.util.Collections.emptyList();
27356           bitField0_ = (bitField0_ & ~0x00000004);
27357           onChanged();
27358         } else {
27359           actionBuilder_.clear();
27360         }
27361         return this;
27362       }
27363       /**
27364        * <code>repeated .hbase.pb.Action action = 3;</code>
27365        */
27366       public Builder removeAction(int index) {
27367         if (actionBuilder_ == null) {
27368           ensureActionIsMutable();
27369           action_.remove(index);
27370           onChanged();
27371         } else {
27372           actionBuilder_.remove(index);
27373         }
27374         return this;
27375       }
27376       /**
27377        * <code>repeated .hbase.pb.Action action = 3;</code>
27378        */
27379       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder getActionBuilder(
27380           int index) {
27381         return getActionFieldBuilder().getBuilder(index);
27382       }
27383       /**
27384        * <code>repeated .hbase.pb.Action action = 3;</code>
27385        */
27386       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
27387           int index) {
27388         if (actionBuilder_ == null) {
27389           return action_.get(index);  } else {
27390           return actionBuilder_.getMessageOrBuilder(index);
27391         }
27392       }
27393       /**
27394        * <code>repeated .hbase.pb.Action action = 3;</code>
27395        */
27396       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
27397            getActionOrBuilderList() {
27398         if (actionBuilder_ != null) {
27399           return actionBuilder_.getMessageOrBuilderList();
27400         } else {
27401           return java.util.Collections.unmodifiableList(action_);
27402         }
27403       }
27404       /**
27405        * <code>repeated .hbase.pb.Action action = 3;</code>
27406        */
27407       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder() {
27408         return getActionFieldBuilder().addBuilder(
27409             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
27410       }
27411       /**
27412        * <code>repeated .hbase.pb.Action action = 3;</code>
27413        */
27414       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder(
27415           int index) {
27416         return getActionFieldBuilder().addBuilder(
27417             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
27418       }
27419       /**
27420        * <code>repeated .hbase.pb.Action action = 3;</code>
27421        */
27422       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder> 
27423            getActionBuilderList() {
27424         return getActionFieldBuilder().getBuilderList();
27425       }
27426       private com.google.protobuf.RepeatedFieldBuilder<
27427           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
27428           getActionFieldBuilder() {
27429         if (actionBuilder_ == null) {
27430           actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
27431               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>(
27432                   action_,
27433                   ((bitField0_ & 0x00000004) == 0x00000004),
27434                   getParentForChildren(),
27435                   isClean());
27436           action_ = null;
27437         }
27438         return actionBuilder_;
27439       }
27440 
27441       // @@protoc_insertion_point(builder_scope:hbase.pb.RegionAction)
27442     }
27443 
27444     static {
27445       defaultInstance = new RegionAction(true);
27446       defaultInstance.initFields();
27447     }
27448 
27449     // @@protoc_insertion_point(class_scope:hbase.pb.RegionAction)
27450   }
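  // Editor's sketch, not protoc output: how a RegionAction might be assembled
  // and round-tripped with the builder and parser defined above. The
  // RegionSpecifier and Action arguments are assumed to be fully built by the
  // caller; region is a required field, so build() throws if it is missing.
  private static RegionAction newAtomicRegionAction(
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region,
      java.util.List<Action> actions)
      throws com.google.protobuf.InvalidProtocolBufferException {
    RegionAction regionAction = RegionAction.newBuilder()
        .setRegion(region)        // required .hbase.pb.RegionSpecifier region = 1
        .setAtomic(true)          // run the contained actions as one atomic unit
        .addAllAction(actions)    // repeated .hbase.pb.Action action = 3
        .build();
    // Serialize and parse back through the generated PARSER to show the wire
    // round-trip; toByteString() is inherited from GeneratedMessage.
    return RegionAction.parseFrom(regionAction.toByteString());
  }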
27451 
27452   public interface RegionLoadStatsOrBuilder
27453       extends com.google.protobuf.MessageOrBuilder {
27454 
27455     // optional int32 memstoreLoad = 1 [default = 0];
27456     /**
27457      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27458      *
27459      * <pre>
27460      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27461      * </pre>
27462      */
27463     boolean hasMemstoreLoad();
27464     /**
27465      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27466      *
27467      * <pre>
27468      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27469      * </pre>
27470      */
27471     int getMemstoreLoad();
27472 
27473     // optional int32 heapOccupancy = 2 [default = 0];
27474     /**
27475      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
27476      *
27477      * <pre>
27478      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
27479      * We can move this to "ServerLoadStats" should we develop them.
27480      * </pre>
27481      */
27482     boolean hasHeapOccupancy();
27483     /**
27484      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
27485      *
27486      * <pre>
27487      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
27488      * We can move this to "ServerLoadStats" should we develop them.
27489      * </pre>
27490      */
27491     int getHeapOccupancy();
27492   }
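  // Editor's sketch, not protoc output: reading the optional load fields through
  // the RegionLoadStatsOrBuilder accessors declared above, falling back to 0
  // (the declared default) when a field is unset.
  private static int maxLoadPercent(RegionLoadStatsOrBuilder stats) {
    // Both fields are documented as percentages in [0, 100]; report the larger
    // of memstore load and JVM heap occupancy as a single summary value.
    int memstore = stats.hasMemstoreLoad() ? stats.getMemstoreLoad() : 0;
    int heap = stats.hasHeapOccupancy() ? stats.getHeapOccupancy() : 0;
    return Math.max(memstore, heap);
  }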
27493   /**
27494    * Protobuf type {@code hbase.pb.RegionLoadStats}
27495    *
27496    * <pre>
27497    *
27498    * Statistics about the current load on the region
27499    * </pre>
27500    */
27501   public static final class RegionLoadStats extends
27502       com.google.protobuf.GeneratedMessage
27503       implements RegionLoadStatsOrBuilder {
27504     // Use RegionLoadStats.newBuilder() to construct.
27505     private RegionLoadStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
27506       super(builder);
27507       this.unknownFields = builder.getUnknownFields();
27508     }
27509     private RegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
27510 
27511     private static final RegionLoadStats defaultInstance;
27512     public static RegionLoadStats getDefaultInstance() {
27513       return defaultInstance;
27514     }
27515 
27516     public RegionLoadStats getDefaultInstanceForType() {
27517       return defaultInstance;
27518     }
27519 
27520     private final com.google.protobuf.UnknownFieldSet unknownFields;
27521     @java.lang.Override
27522     public final com.google.protobuf.UnknownFieldSet
27523         getUnknownFields() {
27524       return this.unknownFields;
27525     }
27526     private RegionLoadStats(
27527         com.google.protobuf.CodedInputStream input,
27528         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27529         throws com.google.protobuf.InvalidProtocolBufferException {
27530       initFields();
27531       int mutable_bitField0_ = 0;
27532       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
27533           com.google.protobuf.UnknownFieldSet.newBuilder();
27534       try {
27535         boolean done = false;
27536         while (!done) {
27537           int tag = input.readTag();
27538           switch (tag) {
27539             case 0:
27540               done = true;
27541               break;
27542             default: {
27543               if (!parseUnknownField(input, unknownFields,
27544                                      extensionRegistry, tag)) {
27545                 done = true;
27546               }
27547               break;
27548             }
27549             case 8: {
27550               bitField0_ |= 0x00000001;
27551               memstoreLoad_ = input.readInt32();
27552               break;
27553             }
27554             case 16: {
27555               bitField0_ |= 0x00000002;
27556               heapOccupancy_ = input.readInt32();
27557               break;
27558             }
27559           }
27560         }
27561       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
27562         throw e.setUnfinishedMessage(this);
27563       } catch (java.io.IOException e) {
27564         throw new com.google.protobuf.InvalidProtocolBufferException(
27565             e.getMessage()).setUnfinishedMessage(this);
27566       } finally {
27567         this.unknownFields = unknownFields.build();
27568         makeExtensionsImmutable();
27569       }
27570     }
27571     public static final com.google.protobuf.Descriptors.Descriptor
27572         getDescriptor() {
27573       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
27574     }
27575 
27576     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
27577         internalGetFieldAccessorTable() {
27578       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable
27579           .ensureFieldAccessorsInitialized(
27580               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
27581     }
27582 
27583     public static com.google.protobuf.Parser<RegionLoadStats> PARSER =
27584         new com.google.protobuf.AbstractParser<RegionLoadStats>() {
27585       public RegionLoadStats parsePartialFrom(
27586           com.google.protobuf.CodedInputStream input,
27587           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27588           throws com.google.protobuf.InvalidProtocolBufferException {
27589         return new RegionLoadStats(input, extensionRegistry);
27590       }
27591     };
27592 
27593     @java.lang.Override
27594     public com.google.protobuf.Parser<RegionLoadStats> getParserForType() {
27595       return PARSER;
27596     }
27597 
27598     private int bitField0_;
27599     // optional int32 memstoreLoad = 1 [default = 0];
27600     public static final int MEMSTORELOAD_FIELD_NUMBER = 1;
27601     private int memstoreLoad_;
27602     /**
27603      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27604      *
27605      * <pre>
27606      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27607      * </pre>
27608      */
27609     public boolean hasMemstoreLoad() {
27610       return ((bitField0_ & 0x00000001) == 0x00000001);
27611     }
27612     /**
27613      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27614      *
27615      * <pre>
27616      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27617      * </pre>
27618      */
27619     public int getMemstoreLoad() {
27620       return memstoreLoad_;
27621     }
27622 
27623     // optional int32 heapOccupancy = 2 [default = 0];
27624     public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2;
27625     private int heapOccupancy_;
27626     /**
27627      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
27628      *
27629      * <pre>
27630      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
27631      * We can move this to "ServerLoadStats" should we develop them.
27632      * </pre>
27633      */
27634     public boolean hasHeapOccupancy() {
27635       return ((bitField0_ & 0x00000002) == 0x00000002);
27636     }
27637     /**
27638      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
27639      *
27640      * <pre>
27641      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
27642      * We can move this to "ServerLoadStats" should we develop them.
27643      * </pre>
27644      */
27645     public int getHeapOccupancy() {
27646       return heapOccupancy_;
27647     }
27648 
27649     private void initFields() {
27650       memstoreLoad_ = 0;
27651       heapOccupancy_ = 0;
27652     }
27653     private byte memoizedIsInitialized = -1;
27654     public final boolean isInitialized() {
27655       byte isInitialized = memoizedIsInitialized;
27656       if (isInitialized != -1) return isInitialized == 1;
27657 
27658       memoizedIsInitialized = 1;
27659       return true;
27660     }
27661 
27662     public void writeTo(com.google.protobuf.CodedOutputStream output)
27663                         throws java.io.IOException {
27664       getSerializedSize();
27665       if (((bitField0_ & 0x00000001) == 0x00000001)) {
27666         output.writeInt32(1, memstoreLoad_);
27667       }
27668       if (((bitField0_ & 0x00000002) == 0x00000002)) {
27669         output.writeInt32(2, heapOccupancy_);
27670       }
27671       getUnknownFields().writeTo(output);
27672     }
27673 
27674     private int memoizedSerializedSize = -1;
27675     public int getSerializedSize() {
27676       int size = memoizedSerializedSize;
27677       if (size != -1) return size;
27678 
27679       size = 0;
27680       if (((bitField0_ & 0x00000001) == 0x00000001)) {
27681         size += com.google.protobuf.CodedOutputStream
27682           .computeInt32Size(1, memstoreLoad_);
27683       }
27684       if (((bitField0_ & 0x00000002) == 0x00000002)) {
27685         size += com.google.protobuf.CodedOutputStream
27686           .computeInt32Size(2, heapOccupancy_);
27687       }
27688       size += getUnknownFields().getSerializedSize();
27689       memoizedSerializedSize = size;
27690       return size;
27691     }
27692 
27693     private static final long serialVersionUID = 0L;
27694     @java.lang.Override
27695     protected java.lang.Object writeReplace()
27696         throws java.io.ObjectStreamException {
27697       return super.writeReplace();
27698     }
27699 
27700     @java.lang.Override
27701     public boolean equals(final java.lang.Object obj) {
27702       if (obj == this) {
27703        return true;
27704       }
27705       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)) {
27706         return super.equals(obj);
27707       }
27708       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) obj;
27709 
27710       boolean result = true;
27711       result = result && (hasMemstoreLoad() == other.hasMemstoreLoad());
27712       if (hasMemstoreLoad()) {
27713         result = result && (getMemstoreLoad()
27714             == other.getMemstoreLoad());
27715       }
27716       result = result && (hasHeapOccupancy() == other.hasHeapOccupancy());
27717       if (hasHeapOccupancy()) {
27718         result = result && (getHeapOccupancy()
27719             == other.getHeapOccupancy());
27720       }
27721       result = result &&
27722           getUnknownFields().equals(other.getUnknownFields());
27723       return result;
27724     }
27725 
27726     private int memoizedHashCode = 0;
27727     @java.lang.Override
27728     public int hashCode() {
27729       if (memoizedHashCode != 0) {
27730         return memoizedHashCode;
27731       }
27732       int hash = 41;
27733       hash = (19 * hash) + getDescriptorForType().hashCode();
27734       if (hasMemstoreLoad()) {
27735         hash = (37 * hash) + MEMSTORELOAD_FIELD_NUMBER;
27736         hash = (53 * hash) + getMemstoreLoad();
27737       }
27738       if (hasHeapOccupancy()) {
27739         hash = (37 * hash) + HEAPOCCUPANCY_FIELD_NUMBER;
27740         hash = (53 * hash) + getHeapOccupancy();
27741       }
27742       hash = (29 * hash) + getUnknownFields().hashCode();
27743       memoizedHashCode = hash;
27744       return hash;
27745     }
27746 
27747     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
27748         com.google.protobuf.ByteString data)
27749         throws com.google.protobuf.InvalidProtocolBufferException {
27750       return PARSER.parseFrom(data);
27751     }
27752     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
27753         com.google.protobuf.ByteString data,
27754         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27755         throws com.google.protobuf.InvalidProtocolBufferException {
27756       return PARSER.parseFrom(data, extensionRegistry);
27757     }
27758     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(byte[] data)
27759         throws com.google.protobuf.InvalidProtocolBufferException {
27760       return PARSER.parseFrom(data);
27761     }
27762     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
27763         byte[] data,
27764         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27765         throws com.google.protobuf.InvalidProtocolBufferException {
27766       return PARSER.parseFrom(data, extensionRegistry);
27767     }
27768     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(java.io.InputStream input)
27769         throws java.io.IOException {
27770       return PARSER.parseFrom(input);
27771     }
27772     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
27773         java.io.InputStream input,
27774         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27775         throws java.io.IOException {
27776       return PARSER.parseFrom(input, extensionRegistry);
27777     }
27778     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(java.io.InputStream input)
27779         throws java.io.IOException {
27780       return PARSER.parseDelimitedFrom(input);
27781     }
27782     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(
27783         java.io.InputStream input,
27784         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27785         throws java.io.IOException {
27786       return PARSER.parseDelimitedFrom(input, extensionRegistry);
27787     }
27788     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
27789         com.google.protobuf.CodedInputStream input)
27790         throws java.io.IOException {
27791       return PARSER.parseFrom(input);
27792     }
27793     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
27794         com.google.protobuf.CodedInputStream input,
27795         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27796         throws java.io.IOException {
27797       return PARSER.parseFrom(input, extensionRegistry);
27798     }
27799 
27800     public static Builder newBuilder() { return Builder.create(); }
27801     public Builder newBuilderForType() { return newBuilder(); }
27802     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats prototype) {
27803       return newBuilder().mergeFrom(prototype);
27804     }
27805     public Builder toBuilder() { return newBuilder(this); }
27806 
27807     @java.lang.Override
27808     protected Builder newBuilderForType(
27809         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
27810       Builder builder = new Builder(parent);
27811       return builder;
27812     }
27813     /**
27814      * Protobuf type {@code hbase.pb.RegionLoadStats}
27815      *
27816      * <pre>
27817      *
27818      * Statistics about the current load on the region
27819      * </pre>
27820      */
27821     public static final class Builder extends
27822         com.google.protobuf.GeneratedMessage.Builder<Builder>
27823        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder {
27824       public static final com.google.protobuf.Descriptors.Descriptor
27825           getDescriptor() {
27826         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
27827       }
27828 
27829       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
27830           internalGetFieldAccessorTable() {
27831         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable
27832             .ensureFieldAccessorsInitialized(
27833                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
27834       }
27835 
27836       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder()
27837       private Builder() {
27838         maybeForceBuilderInitialization();
27839       }
27840 
27841       private Builder(
27842           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
27843         super(parent);
27844         maybeForceBuilderInitialization();
27845       }
27846       private void maybeForceBuilderInitialization() {
27847         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
27848         }
27849       }
27850       private static Builder create() {
27851         return new Builder();
27852       }
27853 
27854       public Builder clear() {
27855         super.clear();
27856         memstoreLoad_ = 0;
27857         bitField0_ = (bitField0_ & ~0x00000001);
27858         heapOccupancy_ = 0;
27859         bitField0_ = (bitField0_ & ~0x00000002);
27860         return this;
27861       }
27862 
27863       public Builder clone() {
27864         return create().mergeFrom(buildPartial());
27865       }
27866 
27867       public com.google.protobuf.Descriptors.Descriptor
27868           getDescriptorForType() {
27869         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
27870       }
27871 
27872       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() {
27873         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
27874       }
27875 
27876       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats build() {
27877         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = buildPartial();
27878         if (!result.isInitialized()) {
27879           throw newUninitializedMessageException(result);
27880         }
27881         return result;
27882       }
27883 
27884       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats buildPartial() {
27885         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats(this);
27886         int from_bitField0_ = bitField0_;
27887         int to_bitField0_ = 0;
27888         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
27889           to_bitField0_ |= 0x00000001;
27890         }
27891         result.memstoreLoad_ = memstoreLoad_;
27892         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
27893           to_bitField0_ |= 0x00000002;
27894         }
27895         result.heapOccupancy_ = heapOccupancy_;
27896         result.bitField0_ = to_bitField0_;
27897         onBuilt();
27898         return result;
27899       }
27900 
27901       public Builder mergeFrom(com.google.protobuf.Message other) {
27902         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) {
27903           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)other);
27904         } else {
27905           super.mergeFrom(other);
27906           return this;
27907         }
27908       }
27909 
27910       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other) {
27911         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) return this;
27912         if (other.hasMemstoreLoad()) {
27913           setMemstoreLoad(other.getMemstoreLoad());
27914         }
27915         if (other.hasHeapOccupancy()) {
27916           setHeapOccupancy(other.getHeapOccupancy());
27917         }
27918         this.mergeUnknownFields(other.getUnknownFields());
27919         return this;
27920       }
27921 
27922       public final boolean isInitialized() {
27923         return true;
27924       }
27925 
27926       public Builder mergeFrom(
27927           com.google.protobuf.CodedInputStream input,
27928           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27929           throws java.io.IOException {
27930         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parsedMessage = null;
27931         try {
27932           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
27933         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
27934           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) e.getUnfinishedMessage();
27935           throw e;
27936         } finally {
27937           if (parsedMessage != null) {
27938             mergeFrom(parsedMessage);
27939           }
27940         }
27941         return this;
27942       }
27943       private int bitField0_;
27944 
27945       // optional int32 memstoreLoad = 1 [default = 0];
27946       private int memstoreLoad_ ;
27947       /**
27948        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27949        *
27950        * <pre>
27951        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27952        * </pre>
27953        */
27954       public boolean hasMemstoreLoad() {
27955         return ((bitField0_ & 0x00000001) == 0x00000001);
27956       }
27957       /**
27958        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27959        *
27960        * <pre>
27961        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27962        * </pre>
27963        */
27964       public int getMemstoreLoad() {
27965         return memstoreLoad_;
27966       }
27967       /**
27968        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27969        *
27970        * <pre>
27971        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27972        * </pre>
27973        */
27974       public Builder setMemstoreLoad(int value) {
27975         bitField0_ |= 0x00000001;
27976         memstoreLoad_ = value;
27977         onChanged();
27978         return this;
27979       }
27980       /**
27981        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
27982        *
27983        * <pre>
27984        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
27985        * </pre>
27986        */
27987       public Builder clearMemstoreLoad() {
27988         bitField0_ = (bitField0_ & ~0x00000001);
27989         memstoreLoad_ = 0;
27990         onChanged();
27991         return this;
27992       }
27993 
27994       // optional int32 heapOccupancy = 2 [default = 0];
27995       private int heapOccupancy_ ;
27996       /**
27997        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
27998        *
27999        * <pre>
28000        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28001        * We can move this to "ServerLoadStats" should we develop them.
28002        * </pre>
28003        */
28004       public boolean hasHeapOccupancy() {
28005         return ((bitField0_ & 0x00000002) == 0x00000002);
28006       }
28007       /**
28008        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28009        *
28010        * <pre>
28011        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28012        * We can move this to "ServerLoadStats" should we develop them.
28013        * </pre>
28014        */
28015       public int getHeapOccupancy() {
28016         return heapOccupancy_;
28017       }
28018       /**
28019        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28020        *
28021        * <pre>
28022        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28023        * We can move this to "ServerLoadStats" should we develop them.
28024        * </pre>
28025        */
28026       public Builder setHeapOccupancy(int value) {
28027         bitField0_ |= 0x00000002;
28028         heapOccupancy_ = value;
28029         onChanged();
28030         return this;
28031       }
28032       /**
28033        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28034        *
28035        * <pre>
28036        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28037        * We can move this to "ServerLoadStats" should we develop them.
28038        * </pre>
28039        */
28040       public Builder clearHeapOccupancy() {
28041         bitField0_ = (bitField0_ & ~0x00000002);
28042         heapOccupancy_ = 0;
28043         onChanged();
28044         return this;
28045       }
28046 
28047       // @@protoc_insertion_point(builder_scope:hbase.pb.RegionLoadStats)
28048     }
28049 
28050     static {
28051       defaultInstance = new RegionLoadStats(true);
28052       defaultInstance.initFields();
28053     }
28054 
28055     // @@protoc_insertion_point(class_scope:hbase.pb.RegionLoadStats)
28056   }
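  // Minimal usage sketch for the message above (illustrative; toByteArray() is assumed from
  // the standard protobuf-java runtime, the other calls are defined in this class): build a
  // RegionLoadStats, round-trip it through its serialized form, and read the fields back.
  //
  //   ClientProtos.RegionLoadStats stats = ClientProtos.RegionLoadStats.newBuilder()
  //       .setMemstoreLoad(42)      // percent load on the memstore, 0-100
  //       .setHeapOccupancy(73)     // percent JVM heap occupancy, 0-100
  //       .build();
  //   byte[] wire = stats.toByteArray();
  //   ClientProtos.RegionLoadStats copy = ClientProtos.RegionLoadStats.parseFrom(wire);
  //   assert copy.hasMemstoreLoad() && copy.getMemstoreLoad() == 42;
  //   assert copy.hasHeapOccupancy() && copy.getHeapOccupancy() == 73;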
28057 
28058   public interface ResultOrExceptionOrBuilder
28059       extends com.google.protobuf.MessageOrBuilder {
28060 
28061     // optional uint32 index = 1;
28062     /**
28063      * <code>optional uint32 index = 1;</code>
28064      *
28065      * <pre>
28066      * If part of a multi call, this holds the original index in the list of
28067      * operations passed, so the response can be aligned with the original request.
28068      * </pre>
28069      */
28070     boolean hasIndex();
28071     /**
28072      * <code>optional uint32 index = 1;</code>
28073      *
28074      * <pre>
28075      * If part of a multi call, this holds the original index in the list of
28076      * operations passed, so the response can be aligned with the original request.
28077      * </pre>
28078      */
28079     int getIndex();
28080 
28081     // optional .hbase.pb.Result result = 2;
28082     /**
28083      * <code>optional .hbase.pb.Result result = 2;</code>
28084      */
28085     boolean hasResult();
28086     /**
28087      * <code>optional .hbase.pb.Result result = 2;</code>
28088      */
28089     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
28090     /**
28091      * <code>optional .hbase.pb.Result result = 2;</code>
28092      */
28093     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
28094 
28095     // optional .hbase.pb.NameBytesPair exception = 3;
28096     /**
28097      * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
28098      */
28099     boolean hasException();
28100     /**
28101      * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
28102      */
28103     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
28104     /**
28105      * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
28106      */
28107     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
28108 
28109     // optional .hbase.pb.CoprocessorServiceResult service_result = 4;
28110     /**
28111      * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
28112      *
28113      * <pre>
28114      * result if this was a coprocessor service call
28115      * </pre>
28116      */
28117     boolean hasServiceResult();
28118     /**
28119      * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
28120      *
28121      * <pre>
28122      * result if this was a coprocessor service call
28123      * </pre>
28124      */
28125     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult();
28126     /**
28127      * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
28128      *
28129      * <pre>
28130      * result if this was a coprocessor service call
28131      * </pre>
28132      */
28133     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder();
28134 
28135     // optional .hbase.pb.RegionLoadStats loadStats = 5;
28136     /**
28137      * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
28138      *
28139      * <pre>
28140      * current load on the region
28141      * </pre>
28142      */
28143     boolean hasLoadStats();
28144     /**
28145      * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
28146      *
28147      * <pre>
28148      * current load on the region
28149      * </pre>
28150      */
28151     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats();
28152     /**
28153      * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
28154      *
28155      * <pre>
28156      * current load on the region
28157      * </pre>
28158      */
28159     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder();
28160   }
28161   /**
28162    * Protobuf type {@code hbase.pb.ResultOrException}
28163    *
28164    * <pre>
28165    **
28166    * Either a Result or an Exception NameBytesPair (keyed by the
28167    * exception name, with the stringified exception as its value),
28168    * or possibly empty if there is neither a result nor an exception.
28169    * </pre>
28170    */
28171   public static final class ResultOrException extends
28172       com.google.protobuf.GeneratedMessage
28173       implements ResultOrExceptionOrBuilder {
28174     // Use ResultOrException.newBuilder() to construct.
28175     private ResultOrException(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
28176       super(builder);
28177       this.unknownFields = builder.getUnknownFields();
28178     }
28179     private ResultOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
28180 
28181     private static final ResultOrException defaultInstance;
28182     public static ResultOrException getDefaultInstance() {
28183       return defaultInstance;
28184     }
28185 
28186     public ResultOrException getDefaultInstanceForType() {
28187       return defaultInstance;
28188     }
28189 
28190     private final com.google.protobuf.UnknownFieldSet unknownFields;
28191     @java.lang.Override
28192     public final com.google.protobuf.UnknownFieldSet
28193         getUnknownFields() {
28194       return this.unknownFields;
28195     }
28196     private ResultOrException(
28197         com.google.protobuf.CodedInputStream input,
28198         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28199         throws com.google.protobuf.InvalidProtocolBufferException {
28200       initFields();
28201       int mutable_bitField0_ = 0;
28202       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
28203           com.google.protobuf.UnknownFieldSet.newBuilder();
28204       try {
28205         boolean done = false;
28206         while (!done) {
28207           int tag = input.readTag();
28208           switch (tag) {
28209             case 0:
28210               done = true;
28211               break;
28212             default: {
28213               if (!parseUnknownField(input, unknownFields,
28214                                      extensionRegistry, tag)) {
28215                 done = true;
28216               }
28217               break;
28218             }
28219             case 8: {
28220               bitField0_ |= 0x00000001;
28221               index_ = input.readUInt32();
28222               break;
28223             }
28224             case 18: {
28225               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
28226               if (((bitField0_ & 0x00000002) == 0x00000002)) {
28227                 subBuilder = result_.toBuilder();
28228               }
28229               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
28230               if (subBuilder != null) {
28231                 subBuilder.mergeFrom(result_);
28232                 result_ = subBuilder.buildPartial();
28233               }
28234               bitField0_ |= 0x00000002;
28235               break;
28236             }
28237             case 26: {
28238               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
28239               if (((bitField0_ & 0x00000004) == 0x00000004)) {
28240                 subBuilder = exception_.toBuilder();
28241               }
28242               exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
28243               if (subBuilder != null) {
28244                 subBuilder.mergeFrom(exception_);
28245                 exception_ = subBuilder.buildPartial();
28246               }
28247               bitField0_ |= 0x00000004;
28248               break;
28249             }
28250             case 34: {
28251               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder subBuilder = null;
28252               if (((bitField0_ & 0x00000008) == 0x00000008)) {
28253                 subBuilder = serviceResult_.toBuilder();
28254               }
28255               serviceResult_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.PARSER, extensionRegistry);
28256               if (subBuilder != null) {
28257                 subBuilder.mergeFrom(serviceResult_);
28258                 serviceResult_ = subBuilder.buildPartial();
28259               }
28260               bitField0_ |= 0x00000008;
28261               break;
28262             }
28263             case 42: {
28264               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder subBuilder = null;
28265               if (((bitField0_ & 0x00000010) == 0x00000010)) {
28266                 subBuilder = loadStats_.toBuilder();
28267               }
28268               loadStats_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry);
28269               if (subBuilder != null) {
28270                 subBuilder.mergeFrom(loadStats_);
28271                 loadStats_ = subBuilder.buildPartial();
28272               }
28273               bitField0_ |= 0x00000010;
28274               break;
28275             }
28276           }
28277         }
28278       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28279         throw e.setUnfinishedMessage(this);
28280       } catch (java.io.IOException e) {
28281         throw new com.google.protobuf.InvalidProtocolBufferException(
28282             e.getMessage()).setUnfinishedMessage(this);
28283       } finally {
28284         this.unknownFields = unknownFields.build();
28285         makeExtensionsImmutable();
28286       }
28287     }
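    // Tag dispatch above, per the protobuf wire format ((field_number << 3) | wire_type):
    // case 8 is field 1 (index, varint); cases 18, 26, 34 and 42 are fields 2-5
    // (result, exception, service_result, loadStats), each length-delimited. If a message
    // field appears more than once on the wire, the previously parsed value is merged with
    // the new one through its Builder before the bit in bitField0_ is set.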
28288     public static final com.google.protobuf.Descriptors.Descriptor
28289         getDescriptor() {
28290       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor;
28291     }
28292 
28293     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
28294         internalGetFieldAccessorTable() {
28295       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable
28296           .ensureFieldAccessorsInitialized(
28297               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
28298     }
28299 
28300     public static com.google.protobuf.Parser<ResultOrException> PARSER =
28301         new com.google.protobuf.AbstractParser<ResultOrException>() {
28302       public ResultOrException parsePartialFrom(
28303           com.google.protobuf.CodedInputStream input,
28304           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28305           throws com.google.protobuf.InvalidProtocolBufferException {
28306         return new ResultOrException(input, extensionRegistry);
28307       }
28308     };
28309 
28310     @java.lang.Override
28311     public com.google.protobuf.Parser<ResultOrException> getParserForType() {
28312       return PARSER;
28313     }
28314 
28315     private int bitField0_;
28316     // optional uint32 index = 1;
28317     public static final int INDEX_FIELD_NUMBER = 1;
28318     private int index_;
28319     /**
28320      * <code>optional uint32 index = 1;</code>
28321      *
28322      * <pre>
28323      * If part of a multi call, this holds the original index in the list of
28324      * operations passed, so the response can be aligned with the original request.
28325      * </pre>
28326      */
28327     public boolean hasIndex() {
28328       return ((bitField0_ & 0x00000001) == 0x00000001);
28329     }
28330     /**
28331      * <code>optional uint32 index = 1;</code>
28332      *
28333      * <pre>
28334      * If part of a multi call, this holds the original index in the list of
28335      * operations passed, so the response can be aligned with the original request.
28336      * </pre>
28337      */
28338     public int getIndex() {
28339       return index_;
28340     }
28341 
28342     // optional .hbase.pb.Result result = 2;
28343     public static final int RESULT_FIELD_NUMBER = 2;
28344     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
28345     /**
28346      * <code>optional .hbase.pb.Result result = 2;</code>
28347      */
28348     public boolean hasResult() {
28349       return ((bitField0_ & 0x00000002) == 0x00000002);
28350     }
28351     /**
28352      * <code>optional .hbase.pb.Result result = 2;</code>
28353      */
28354     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
28355       return result_;
28356     }
28357     /**
28358      * <code>optional .hbase.pb.Result result = 2;</code>
28359      */
28360     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
28361       return result_;
28362     }
28363 
28364     // optional .hbase.pb.NameBytesPair exception = 3;
28365     public static final int EXCEPTION_FIELD_NUMBER = 3;
28366     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
28367     /**
28368      * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
28369      */
28370     public boolean hasException() {
28371       return ((bitField0_ & 0x00000004) == 0x00000004);
28372     }
28373     /**
28374      * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
28375      */
28376     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
28377       return exception_;
28378     }
28379     /**
28380      * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
28381      */
28382     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
28383       return exception_;
28384     }
28385 
28386     // optional .hbase.pb.CoprocessorServiceResult service_result = 4;
28387     public static final int SERVICE_RESULT_FIELD_NUMBER = 4;
28388     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_;
28389     /**
28390      * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
28391      *
28392      * <pre>
28393      * result if this was a coprocessor service call
28394      * </pre>
28395      */
28396     public boolean hasServiceResult() {
28397       return ((bitField0_ & 0x00000008) == 0x00000008);
28398     }
28399     /**
28400      * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
28401      *
28402      * <pre>
28403      * result if this was a coprocessor service call
28404      * </pre>
28405      */
28406     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
28407       return serviceResult_;
28408     }
28409     /**
28410      * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
28411      *
28412      * <pre>
28413      * result if this was a coprocessor service call
28414      * </pre>
28415      */
28416     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
28417       return serviceResult_;
28418     }
28419 
28420     // optional .hbase.pb.RegionLoadStats loadStats = 5;
28421     public static final int LOADSTATS_FIELD_NUMBER = 5;
28422     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_;
28423     /**
28424      * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
28425      *
28426      * <pre>
28427      * current load on the region
28428      * </pre>
28429      */
28430     public boolean hasLoadStats() {
28431       return ((bitField0_ & 0x00000010) == 0x00000010);
28432     }
28433     /**
28434      * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
28435      *
28436      * <pre>
28437      * current load on the region
28438      * </pre>
28439      */
28440     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
28441       return loadStats_;
28442     }
28443     /**
28444      * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
28445      *
28446      * <pre>
28447      * current load on the region
28448      * </pre>
28449      */
28450     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
28451       return loadStats_;
28452     }
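    // Typical read pattern for one entry of a multi response (illustrative sketch using only
    // the accessors defined above): check the exception before trusting the result, and pick
    // up the optional load statistics when the server attached them.
    //
    //   void inspect(ClientProtos.ResultOrException roe) {
    //     int slot = roe.hasIndex() ? roe.getIndex() : -1;  // position in the original multi request
    //     if (roe.hasException()) {
    //       HBaseProtos.NameBytesPair error = roe.getException();
    //       // handle the stringified exception for operation 'slot'
    //     } else if (roe.hasResult()) {
    //       ClientProtos.Result result = roe.getResult();
    //       // consume the result for operation 'slot'
    //     }
    //     if (roe.hasLoadStats()) {
    //       int memstorePct = roe.getLoadStats().getMemstoreLoad();
    //       // e.g. feed region load back into client-side backoff
    //     }
    //   }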
28453 
28454     private void initFields() {
28455       index_ = 0;
28456       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
28457       exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
28458       serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
28459       loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
28460     }
28461     private byte memoizedIsInitialized = -1;
28462     public final boolean isInitialized() {
28463       byte isInitialized = memoizedIsInitialized;
28464       if (isInitialized != -1) return isInitialized == 1;
28465 
28466       if (hasException()) {
28467         if (!getException().isInitialized()) {
28468           memoizedIsInitialized = 0;
28469           return false;
28470         }
28471       }
28472       if (hasServiceResult()) {
28473         if (!getServiceResult().isInitialized()) {
28474           memoizedIsInitialized = 0;
28475           return false;
28476         }
28477       }
28478       memoizedIsInitialized = 1;
28479       return true;
28480     }
28481 
28482     public void writeTo(com.google.protobuf.CodedOutputStream output)
28483                         throws java.io.IOException {
28484       getSerializedSize();
28485       if (((bitField0_ & 0x00000001) == 0x00000001)) {
28486         output.writeUInt32(1, index_);
28487       }
28488       if (((bitField0_ & 0x00000002) == 0x00000002)) {
28489         output.writeMessage(2, result_);
28490       }
28491       if (((bitField0_ & 0x00000004) == 0x00000004)) {
28492         output.writeMessage(3, exception_);
28493       }
28494       if (((bitField0_ & 0x00000008) == 0x00000008)) {
28495         output.writeMessage(4, serviceResult_);
28496       }
28497       if (((bitField0_ & 0x00000010) == 0x00000010)) {
28498         output.writeMessage(5, loadStats_);
28499       }
28500       getUnknownFields().writeTo(output);
28501     }
28502 
28503     private int memoizedSerializedSize = -1;
28504     public int getSerializedSize() {
28505       int size = memoizedSerializedSize;
28506       if (size != -1) return size;
28507 
28508       size = 0;
28509       if (((bitField0_ & 0x00000001) == 0x00000001)) {
28510         size += com.google.protobuf.CodedOutputStream
28511           .computeUInt32Size(1, index_);
28512       }
28513       if (((bitField0_ & 0x00000002) == 0x00000002)) {
28514         size += com.google.protobuf.CodedOutputStream
28515           .computeMessageSize(2, result_);
28516       }
28517       if (((bitField0_ & 0x00000004) == 0x00000004)) {
28518         size += com.google.protobuf.CodedOutputStream
28519           .computeMessageSize(3, exception_);
28520       }
28521       if (((bitField0_ & 0x00000008) == 0x00000008)) {
28522         size += com.google.protobuf.CodedOutputStream
28523           .computeMessageSize(4, serviceResult_);
28524       }
28525       if (((bitField0_ & 0x00000010) == 0x00000010)) {
28526         size += com.google.protobuf.CodedOutputStream
28527           .computeMessageSize(5, loadStats_);
28528       }
28529       size += getUnknownFields().getSerializedSize();
28530       memoizedSerializedSize = size;
28531       return size;
28532     }
28533 
28534     private static final long serialVersionUID = 0L;
28535     @java.lang.Override
28536     protected java.lang.Object writeReplace()
28537         throws java.io.ObjectStreamException {
28538       return super.writeReplace();
28539     }
28540 
28541     @java.lang.Override
28542     public boolean equals(final java.lang.Object obj) {
28543       if (obj == this) {
28544        return true;
28545       }
28546       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)) {
28547         return super.equals(obj);
28548       }
28549       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) obj;
28550 
28551       boolean result = true;
28552       result = result && (hasIndex() == other.hasIndex());
28553       if (hasIndex()) {
28554         result = result && (getIndex()
28555             == other.getIndex());
28556       }
28557       result = result && (hasResult() == other.hasResult());
28558       if (hasResult()) {
28559         result = result && getResult()
28560             .equals(other.getResult());
28561       }
28562       result = result && (hasException() == other.hasException());
28563       if (hasException()) {
28564         result = result && getException()
28565             .equals(other.getException());
28566       }
28567       result = result && (hasServiceResult() == other.hasServiceResult());
28568       if (hasServiceResult()) {
28569         result = result && getServiceResult()
28570             .equals(other.getServiceResult());
28571       }
28572       result = result && (hasLoadStats() == other.hasLoadStats());
28573       if (hasLoadStats()) {
28574         result = result && getLoadStats()
28575             .equals(other.getLoadStats());
28576       }
28577       result = result &&
28578           getUnknownFields().equals(other.getUnknownFields());
28579       return result;
28580     }
28581 
28582     private int memoizedHashCode = 0;
28583     @java.lang.Override
28584     public int hashCode() {
28585       if (memoizedHashCode != 0) {
28586         return memoizedHashCode;
28587       }
28588       int hash = 41;
28589       hash = (19 * hash) + getDescriptorForType().hashCode();
28590       if (hasIndex()) {
28591         hash = (37 * hash) + INDEX_FIELD_NUMBER;
28592         hash = (53 * hash) + getIndex();
28593       }
28594       if (hasResult()) {
28595         hash = (37 * hash) + RESULT_FIELD_NUMBER;
28596         hash = (53 * hash) + getResult().hashCode();
28597       }
28598       if (hasException()) {
28599         hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
28600         hash = (53 * hash) + getException().hashCode();
28601       }
28602       if (hasServiceResult()) {
28603         hash = (37 * hash) + SERVICE_RESULT_FIELD_NUMBER;
28604         hash = (53 * hash) + getServiceResult().hashCode();
28605       }
28606       if (hasLoadStats()) {
28607         hash = (37 * hash) + LOADSTATS_FIELD_NUMBER;
28608         hash = (53 * hash) + getLoadStats().hashCode();
28609       }
28610       hash = (29 * hash) + getUnknownFields().hashCode();
28611       memoizedHashCode = hash;
28612       return hash;
28613     }
28614 
28615     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
28616         com.google.protobuf.ByteString data)
28617         throws com.google.protobuf.InvalidProtocolBufferException {
28618       return PARSER.parseFrom(data);
28619     }
28620     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
28621         com.google.protobuf.ByteString data,
28622         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28623         throws com.google.protobuf.InvalidProtocolBufferException {
28624       return PARSER.parseFrom(data, extensionRegistry);
28625     }
28626     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(byte[] data)
28627         throws com.google.protobuf.InvalidProtocolBufferException {
28628       return PARSER.parseFrom(data);
28629     }
28630     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
28631         byte[] data,
28632         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28633         throws com.google.protobuf.InvalidProtocolBufferException {
28634       return PARSER.parseFrom(data, extensionRegistry);
28635     }
28636     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input)
28637         throws java.io.IOException {
28638       return PARSER.parseFrom(input);
28639     }
28640     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
28641         java.io.InputStream input,
28642         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28643         throws java.io.IOException {
28644       return PARSER.parseFrom(input, extensionRegistry);
28645     }
28646     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input)
28647         throws java.io.IOException {
28648       return PARSER.parseDelimitedFrom(input);
28649     }
28650     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(
28651         java.io.InputStream input,
28652         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28653         throws java.io.IOException {
28654       return PARSER.parseDelimitedFrom(input, extensionRegistry);
28655     }
28656     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
28657         com.google.protobuf.CodedInputStream input)
28658         throws java.io.IOException {
28659       return PARSER.parseFrom(input);
28660     }
28661     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
28662         com.google.protobuf.CodedInputStream input,
28663         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28664         throws java.io.IOException {
28665       return PARSER.parseFrom(input, extensionRegistry);
28666     }
28667 
28668     public static Builder newBuilder() { return Builder.create(); }
28669     public Builder newBuilderForType() { return newBuilder(); }
28670     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException prototype) {
28671       return newBuilder().mergeFrom(prototype);
28672     }
28673     public Builder toBuilder() { return newBuilder(this); }
28674 
28675     @java.lang.Override
28676     protected Builder newBuilderForType(
28677         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28678       Builder builder = new Builder(parent);
28679       return builder;
28680     }
28681     /**
28682      * Protobuf type {@code hbase.pb.ResultOrException}
28683      *
28684      * <pre>
28685      **
28686      * Either a Result or an Exception NameBytesPair (keyed by the
28687      * exception name, with the stringified exception as its value),
28688      * or possibly empty if there is neither a result nor an exception.
28689      * </pre>
28690      */
28691     public static final class Builder extends
28692         com.google.protobuf.GeneratedMessage.Builder<Builder>
28693        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder {
28694       public static final com.google.protobuf.Descriptors.Descriptor
28695           getDescriptor() {
28696         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor;
28697       }
28698 
28699       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
28700           internalGetFieldAccessorTable() {
28701         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable
28702             .ensureFieldAccessorsInitialized(
28703                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
28704       }
28705 
28706       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.newBuilder()
28707       private Builder() {
28708         maybeForceBuilderInitialization();
28709       }
28710 
28711       private Builder(
28712           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28713         super(parent);
28714         maybeForceBuilderInitialization();
28715       }
28716       private void maybeForceBuilderInitialization() {
28717         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
28718           getResultFieldBuilder();
28719           getExceptionFieldBuilder();
28720           getServiceResultFieldBuilder();
28721           getLoadStatsFieldBuilder();
28722         }
28723       }
28724       private static Builder create() {
28725         return new Builder();
28726       }
28727 
28728       public Builder clear() {
28729         super.clear();
28730         index_ = 0;
28731         bitField0_ = (bitField0_ & ~0x00000001);
28732         if (resultBuilder_ == null) {
28733           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
28734         } else {
28735           resultBuilder_.clear();
28736         }
28737         bitField0_ = (bitField0_ & ~0x00000002);
28738         if (exceptionBuilder_ == null) {
28739           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
28740         } else {
28741           exceptionBuilder_.clear();
28742         }
28743         bitField0_ = (bitField0_ & ~0x00000004);
28744         if (serviceResultBuilder_ == null) {
28745           serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
28746         } else {
28747           serviceResultBuilder_.clear();
28748         }
28749         bitField0_ = (bitField0_ & ~0x00000008);
28750         if (loadStatsBuilder_ == null) {
28751           loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
28752         } else {
28753           loadStatsBuilder_.clear();
28754         }
28755         bitField0_ = (bitField0_ & ~0x00000010);
28756         return this;
28757       }
28758 
28759       public Builder clone() {
28760         return create().mergeFrom(buildPartial());
28761       }
28762 
28763       public com.google.protobuf.Descriptors.Descriptor
28764           getDescriptorForType() {
28765         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor;
28766       }
28767 
28768       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() {
28769         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance();
28770       }
28771 
28772       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException build() {
28773         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = buildPartial();
28774         if (!result.isInitialized()) {
28775           throw newUninitializedMessageException(result);
28776         }
28777         return result;
28778       }
28779 
28780       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException buildPartial() {
28781         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException(this);
28782         int from_bitField0_ = bitField0_;
28783         int to_bitField0_ = 0;
28784         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
28785           to_bitField0_ |= 0x00000001;
28786         }
28787         result.index_ = index_;
28788         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
28789           to_bitField0_ |= 0x00000002;
28790         }
28791         if (resultBuilder_ == null) {
28792           result.result_ = result_;
28793         } else {
28794           result.result_ = resultBuilder_.build();
28795         }
28796         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
28797           to_bitField0_ |= 0x00000004;
28798         }
28799         if (exceptionBuilder_ == null) {
28800           result.exception_ = exception_;
28801         } else {
28802           result.exception_ = exceptionBuilder_.build();
28803         }
28804         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
28805           to_bitField0_ |= 0x00000008;
28806         }
28807         if (serviceResultBuilder_ == null) {
28808           result.serviceResult_ = serviceResult_;
28809         } else {
28810           result.serviceResult_ = serviceResultBuilder_.build();
28811         }
28812         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
28813           to_bitField0_ |= 0x00000010;
28814         }
28815         if (loadStatsBuilder_ == null) {
28816           result.loadStats_ = loadStats_;
28817         } else {
28818           result.loadStats_ = loadStatsBuilder_.build();
28819         }
28820         result.bitField0_ = to_bitField0_;
28821         onBuilt();
28822         return result;
28823       }
28824 
28825       public Builder mergeFrom(com.google.protobuf.Message other) {
28826         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) {
28827           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)other);
28828         } else {
28829           super.mergeFrom(other);
28830           return this;
28831         }
28832       }
28833 
28834       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other) {
28835         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()) return this;
28836         if (other.hasIndex()) {
28837           setIndex(other.getIndex());
28838         }
28839         if (other.hasResult()) {
28840           mergeResult(other.getResult());
28841         }
28842         if (other.hasException()) {
28843           mergeException(other.getException());
28844         }
28845         if (other.hasServiceResult()) {
28846           mergeServiceResult(other.getServiceResult());
28847         }
28848         if (other.hasLoadStats()) {
28849           mergeLoadStats(other.getLoadStats());
28850         }
28851         this.mergeUnknownFields(other.getUnknownFields());
28852         return this;
28853       }
28854 
28855       public final boolean isInitialized() {
28856         if (hasException()) {
28857           if (!getException().isInitialized()) {
28858             
28859             return false;
28860           }
28861         }
28862         if (hasServiceResult()) {
28863           if (!getServiceResult().isInitialized()) {
28864             
28865             return false;
28866           }
28867         }
28868         return true;
28869       }
28870 
28871       public Builder mergeFrom(
28872           com.google.protobuf.CodedInputStream input,
28873           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28874           throws java.io.IOException {
28875         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parsedMessage = null;
28876         try {
28877           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
28878         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28879           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage();
28880           throw e;
28881         } finally {
28882           if (parsedMessage != null) {
28883             mergeFrom(parsedMessage);
28884           }
28885         }
28886         return this;
28887       }
28888       private int bitField0_;
28889 
28890       // optional uint32 index = 1;
28891       private int index_ ;
28892       /**
28893        * <code>optional uint32 index = 1;</code>
28894        *
28895        * <pre>
28896        * If part of a multi call, this holds the original index in the list of
28897        * operations passed, so the response can be aligned with the original request.
28898        * </pre>
28899        */
28900       public boolean hasIndex() {
28901         return ((bitField0_ & 0x00000001) == 0x00000001);
28902       }
28903       /**
28904        * <code>optional uint32 index = 1;</code>
28905        *
28906        * <pre>
28907        * If part of a multi call, this holds the original index in the list of
28908        * operations passed, so the response can be aligned with the original request.
28909        * </pre>
28910        */
28911       public int getIndex() {
28912         return index_;
28913       }
28914       /**
28915        * <code>optional uint32 index = 1;</code>
28916        *
28917        * <pre>
28918        * If part of a multi call, this holds the original index in the list of
28919        * operations passed, so the response can be aligned with the original request.
28920        * </pre>
28921        */
28922       public Builder setIndex(int value) {
28923         bitField0_ |= 0x00000001;
28924         index_ = value;
28925         onChanged();
28926         return this;
28927       }
28928       /**
28929        * <code>optional uint32 index = 1;</code>
28930        *
28931        * If part of a multi call, this holds the original index in the list of
28932        * operations passed, so the response can be aligned with the original request.
28933        * passed so can align this response w/ original request.
28934        * </pre>
28935        */
28936       public Builder clearIndex() {
28937         bitField0_ = (bitField0_ & ~0x00000001);
28938         index_ = 0;
28939         onChanged();
28940         return this;
28941       }
28942 
28943       // optional .hbase.pb.Result result = 2;
28944       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
28945       private com.google.protobuf.SingleFieldBuilder<
28946           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
28947       /**
28948        * <code>optional .hbase.pb.Result result = 2;</code>
28949        */
28950       public boolean hasResult() {
28951         return ((bitField0_ & 0x00000002) == 0x00000002);
28952       }
28953       /**
28954        * <code>optional .hbase.pb.Result result = 2;</code>
28955        */
28956       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
28957         if (resultBuilder_ == null) {
28958           return result_;
28959         } else {
28960           return resultBuilder_.getMessage();
28961         }
28962       }
28963       /**
28964        * <code>optional .hbase.pb.Result result = 2;</code>
28965        */
28966       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
28967         if (resultBuilder_ == null) {
28968           if (value == null) {
28969             throw new NullPointerException();
28970           }
28971           result_ = value;
28972           onChanged();
28973         } else {
28974           resultBuilder_.setMessage(value);
28975         }
28976         bitField0_ |= 0x00000002;
28977         return this;
28978       }
28979       /**
28980        * <code>optional .hbase.pb.Result result = 2;</code>
28981        */
28982       public Builder setResult(
28983           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
28984         if (resultBuilder_ == null) {
28985           result_ = builderForValue.build();
28986           onChanged();
28987         } else {
28988           resultBuilder_.setMessage(builderForValue.build());
28989         }
28990         bitField0_ |= 0x00000002;
28991         return this;
28992       }
28993       /**
28994        * <code>optional .hbase.pb.Result result = 2;</code>
28995        */
28996       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
28997         if (resultBuilder_ == null) {
28998           if (((bitField0_ & 0x00000002) == 0x00000002) &&
28999               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
29000             result_ =
29001               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
29002           } else {
29003             result_ = value;
29004           }
29005           onChanged();
29006         } else {
29007           resultBuilder_.mergeFrom(value);
29008         }
29009         bitField0_ |= 0x00000002;
29010         return this;
29011       }
29012       /**
29013        * <code>optional .hbase.pb.Result result = 2;</code>
29014        */
29015       public Builder clearResult() {
29016         if (resultBuilder_ == null) {
29017           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
29018           onChanged();
29019         } else {
29020           resultBuilder_.clear();
29021         }
29022         bitField0_ = (bitField0_ & ~0x00000002);
29023         return this;
29024       }
29025       /**
29026        * <code>optional .hbase.pb.Result result = 2;</code>
29027        */
29028       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
29029         bitField0_ |= 0x00000002;
29030         onChanged();
29031         return getResultFieldBuilder().getBuilder();
29032       }
29033       /**
29034        * <code>optional .hbase.pb.Result result = 2;</code>
29035        */
29036       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
29037         if (resultBuilder_ != null) {
29038           return resultBuilder_.getMessageOrBuilder();
29039         } else {
29040           return result_;
29041         }
29042       }
29043       /**
29044        * <code>optional .hbase.pb.Result result = 2;</code>
29045        */
29046       private com.google.protobuf.SingleFieldBuilder<
29047           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
29048           getResultFieldBuilder() {
29049         if (resultBuilder_ == null) {
29050           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
29051               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
29052                   result_,
29053                   getParentForChildren(),
29054                   isClean());
29055           result_ = null;
29056         }
29057         return resultBuilder_;
29058       }
29059 
29060       // optional .hbase.pb.NameBytesPair exception = 3;
29061       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
29062       private com.google.protobuf.SingleFieldBuilder<
29063           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
29064       /**
29065        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29066        */
29067       public boolean hasException() {
29068         return ((bitField0_ & 0x00000004) == 0x00000004);
29069       }
29070       /**
29071        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29072        */
29073       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
29074         if (exceptionBuilder_ == null) {
29075           return exception_;
29076         } else {
29077           return exceptionBuilder_.getMessage();
29078         }
29079       }
29080       /**
29081        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29082        */
29083       public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
29084         if (exceptionBuilder_ == null) {
29085           if (value == null) {
29086             throw new NullPointerException();
29087           }
29088           exception_ = value;
29089           onChanged();
29090         } else {
29091           exceptionBuilder_.setMessage(value);
29092         }
29093         bitField0_ |= 0x00000004;
29094         return this;
29095       }
29096       /**
29097        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29098        */
29099       public Builder setException(
29100           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
29101         if (exceptionBuilder_ == null) {
29102           exception_ = builderForValue.build();
29103           onChanged();
29104         } else {
29105           exceptionBuilder_.setMessage(builderForValue.build());
29106         }
29107         bitField0_ |= 0x00000004;
29108         return this;
29109       }
29110       /**
29111        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29112        */
29113       public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
29114         if (exceptionBuilder_ == null) {
29115           if (((bitField0_ & 0x00000004) == 0x00000004) &&
29116               exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
29117             exception_ =
29118               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
29119           } else {
29120             exception_ = value;
29121           }
29122           onChanged();
29123         } else {
29124           exceptionBuilder_.mergeFrom(value);
29125         }
29126         bitField0_ |= 0x00000004;
29127         return this;
29128       }
29129       /**
29130        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29131        */
29132       public Builder clearException() {
29133         if (exceptionBuilder_ == null) {
29134           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
29135           onChanged();
29136         } else {
29137           exceptionBuilder_.clear();
29138         }
29139         bitField0_ = (bitField0_ & ~0x00000004);
29140         return this;
29141       }
29142       /**
29143        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29144        */
29145       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
29146         bitField0_ |= 0x00000004;
29147         onChanged();
29148         return getExceptionFieldBuilder().getBuilder();
29149       }
29150       /**
29151        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29152        */
29153       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
29154         if (exceptionBuilder_ != null) {
29155           return exceptionBuilder_.getMessageOrBuilder();
29156         } else {
29157           return exception_;
29158         }
29159       }
29160       /**
29161        * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
29162        */
29163       private com.google.protobuf.SingleFieldBuilder<
29164           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
29165           getExceptionFieldBuilder() {
29166         if (exceptionBuilder_ == null) {
29167           exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
29168               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
29169                   exception_,
29170                   getParentForChildren(),
29171                   isClean());
29172           exception_ = null;
29173         }
29174         return exceptionBuilder_;
29175       }
29176 
29177       // optional .hbase.pb.CoprocessorServiceResult service_result = 4;
29178       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
29179       private com.google.protobuf.SingleFieldBuilder<
29180           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> serviceResultBuilder_;
29181       /**
29182        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29183        *
29184        * <pre>
29185        * result if this was a coprocessor service call
29186        * </pre>
29187        */
29188       public boolean hasServiceResult() {
29189         return ((bitField0_ & 0x00000008) == 0x00000008);
29190       }
29191       /**
29192        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29193        *
29194        * <pre>
29195        * result if this was a coprocessor service call
29196        * </pre>
29197        */
29198       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
29199         if (serviceResultBuilder_ == null) {
29200           return serviceResult_;
29201         } else {
29202           return serviceResultBuilder_.getMessage();
29203         }
29204       }
29205       /**
29206        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29207        *
29208        * <pre>
29209        * result if this was a coprocessor service call
29210        * </pre>
29211        */
29212       public Builder setServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
29213         if (serviceResultBuilder_ == null) {
29214           if (value == null) {
29215             throw new NullPointerException();
29216           }
29217           serviceResult_ = value;
29218           onChanged();
29219         } else {
29220           serviceResultBuilder_.setMessage(value);
29221         }
29222         bitField0_ |= 0x00000008;
29223         return this;
29224       }
29225       /**
29226        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29227        *
29228        * <pre>
29229        * result if this was a coprocessor service call
29230        * </pre>
29231        */
29232       public Builder setServiceResult(
29233           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue) {
29234         if (serviceResultBuilder_ == null) {
29235           serviceResult_ = builderForValue.build();
29236           onChanged();
29237         } else {
29238           serviceResultBuilder_.setMessage(builderForValue.build());
29239         }
29240         bitField0_ |= 0x00000008;
29241         return this;
29242       }
29243       /**
29244        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29245        *
29246        * <pre>
29247        * result if this was a coprocessor service call
29248        * </pre>
29249        */
29250       public Builder mergeServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
29251         if (serviceResultBuilder_ == null) {
29252           if (((bitField0_ & 0x00000008) == 0x00000008) &&
29253               serviceResult_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) {
29254             serviceResult_ =
29255               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder(serviceResult_).mergeFrom(value).buildPartial();
29256           } else {
29257             serviceResult_ = value;
29258           }
29259           onChanged();
29260         } else {
29261           serviceResultBuilder_.mergeFrom(value);
29262         }
29263         bitField0_ |= 0x00000008;
29264         return this;
29265       }
29266       /**
29267        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29268        *
29269        * <pre>
29270        * result if this was a coprocessor service call
29271        * </pre>
29272        */
29273       public Builder clearServiceResult() {
29274         if (serviceResultBuilder_ == null) {
29275           serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
29276           onChanged();
29277         } else {
29278           serviceResultBuilder_.clear();
29279         }
29280         bitField0_ = (bitField0_ & ~0x00000008);
29281         return this;
29282       }
29283       /**
29284        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29285        *
29286        * <pre>
29287        * result if this was a coprocessor service call
29288        * </pre>
29289        */
29290       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder getServiceResultBuilder() {
29291         bitField0_ |= 0x00000008;
29292         onChanged();
29293         return getServiceResultFieldBuilder().getBuilder();
29294       }
29295       /**
29296        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29297        *
29298        * <pre>
29299        * result if this was a coprocessor service call
29300        * </pre>
29301        */
29302       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
29303         if (serviceResultBuilder_ != null) {
29304           return serviceResultBuilder_.getMessageOrBuilder();
29305         } else {
29306           return serviceResult_;
29307         }
29308       }
29309       /**
29310        * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
29311        *
29312        * <pre>
29313        * result if this was a coprocessor service call
29314        * </pre>
29315        */
29316       private com.google.protobuf.SingleFieldBuilder<
29317           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> 
29318           getServiceResultFieldBuilder() {
29319         if (serviceResultBuilder_ == null) {
29320           serviceResultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
29321               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>(
29322                   serviceResult_,
29323                   getParentForChildren(),
29324                   isClean());
29325           serviceResult_ = null;
29326         }
29327         return serviceResultBuilder_;
29328       }
29329 
29330       // optional .hbase.pb.RegionLoadStats loadStats = 5;
29331       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
29332       private com.google.protobuf.SingleFieldBuilder<
29333           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> loadStatsBuilder_;
29334       /**
29335        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29336        *
29337        * <pre>
29338        * current load on the region
29339        * </pre>
29340        */
29341       public boolean hasLoadStats() {
29342         return ((bitField0_ & 0x00000010) == 0x00000010);
29343       }
29344       /**
29345        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29346        *
29347        * <pre>
29348        * current load on the region
29349        * </pre>
29350        */
29351       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
29352         if (loadStatsBuilder_ == null) {
29353           return loadStats_;
29354         } else {
29355           return loadStatsBuilder_.getMessage();
29356         }
29357       }
29358       /**
29359        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29360        *
29361        * <pre>
29362        * current load on the region
29363        * </pre>
29364        */
29365       public Builder setLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
29366         if (loadStatsBuilder_ == null) {
29367           if (value == null) {
29368             throw new NullPointerException();
29369           }
29370           loadStats_ = value;
29371           onChanged();
29372         } else {
29373           loadStatsBuilder_.setMessage(value);
29374         }
29375         bitField0_ |= 0x00000010;
29376         return this;
29377       }
29378       /**
29379        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29380        *
29381        * <pre>
29382        * current load on the region
29383        * </pre>
29384        */
29385       public Builder setLoadStats(
29386           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
29387         if (loadStatsBuilder_ == null) {
29388           loadStats_ = builderForValue.build();
29389           onChanged();
29390         } else {
29391           loadStatsBuilder_.setMessage(builderForValue.build());
29392         }
29393         bitField0_ |= 0x00000010;
29394         return this;
29395       }
29396       /**
29397        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29398        *
29399        * <pre>
29400        * current load on the region
29401        * </pre>
29402        */
29403       public Builder mergeLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
29404         if (loadStatsBuilder_ == null) {
29405           if (((bitField0_ & 0x00000010) == 0x00000010) &&
29406               loadStats_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) {
29407             loadStats_ =
29408               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder(loadStats_).mergeFrom(value).buildPartial();
29409           } else {
29410             loadStats_ = value;
29411           }
29412           onChanged();
29413         } else {
29414           loadStatsBuilder_.mergeFrom(value);
29415         }
29416         bitField0_ |= 0x00000010;
29417         return this;
29418       }
29419       /**
29420        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29421        *
29422        * <pre>
29423        * current load on the region
29424        * </pre>
29425        */
29426       public Builder clearLoadStats() {
29427         if (loadStatsBuilder_ == null) {
29428           loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
29429           onChanged();
29430         } else {
29431           loadStatsBuilder_.clear();
29432         }
29433         bitField0_ = (bitField0_ & ~0x00000010);
29434         return this;
29435       }
29436       /**
29437        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29438        *
29439        * <pre>
29440        * current load on the region
29441        * </pre>
29442        */
29443       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder getLoadStatsBuilder() {
29444         bitField0_ |= 0x00000010;
29445         onChanged();
29446         return getLoadStatsFieldBuilder().getBuilder();
29447       }
29448       /**
29449        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29450        *
29451        * <pre>
29452        * current load on the region
29453        * </pre>
29454        */
29455       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
29456         if (loadStatsBuilder_ != null) {
29457           return loadStatsBuilder_.getMessageOrBuilder();
29458         } else {
29459           return loadStats_;
29460         }
29461       }
29462       /**
29463        * <code>optional .hbase.pb.RegionLoadStats loadStats = 5;</code>
29464        *
29465        * <pre>
29466        * current load on the region
29467        * </pre>
29468        */
29469       private com.google.protobuf.SingleFieldBuilder<
29470           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> 
29471           getLoadStatsFieldBuilder() {
29472         if (loadStatsBuilder_ == null) {
29473           loadStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
29474               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>(
29475                   loadStats_,
29476                   getParentForChildren(),
29477                   isClean());
29478           loadStats_ = null;
29479         }
29480         return loadStatsBuilder_;
29481       }
29482 
29483       // @@protoc_insertion_point(builder_scope:hbase.pb.ResultOrException)
29484     }
29485 
29486     static {
29487       defaultInstance = new ResultOrException(true);
29488       defaultInstance.initFields();
29489     }
29490 
29491     // @@protoc_insertion_point(class_scope:hbase.pb.ResultOrException)
29492   }
29493 
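  /*
   * Illustrative sketch (not part of the generated file): one way a server-side
   * caller might assemble a ResultOrException for a single action of a multi
   * request, using only the builder methods shown above. The local variables
   * "actionIndex" and "result" are assumptions for the example.
   *
   *   ClientProtos.ResultOrException roe = ClientProtos.ResultOrException.newBuilder()
   *       .setIndex(actionIndex)   // echo the request's action index so the
   *                                // client can line the response back up
   *       .setResult(result)       // or setException(pair) if the action failed
   *       .build();
   */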
29494   public interface RegionActionResultOrBuilder
29495       extends com.google.protobuf.MessageOrBuilder {
29496 
29497     // repeated .hbase.pb.ResultOrException resultOrException = 1;
29498     /**
29499      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29500      */
29501     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> 
29502         getResultOrExceptionList();
29503     /**
29504      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29505      */
29506     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index);
29507     /**
29508      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29509      */
29510     int getResultOrExceptionCount();
29511     /**
29512      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29513      */
29514     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
29515         getResultOrExceptionOrBuilderList();
29516     /**
29517      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29518      */
29519     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
29520         int index);
29521 
29522     // optional .hbase.pb.NameBytesPair exception = 2;
29523     /**
29524      * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
29525      *
29526      * <pre>
29527      * If the operation failed globally for this region, this exception is set
29528      * </pre>
29529      */
29530     boolean hasException();
29531     /**
29532      * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
29533      *
29534      * <pre>
29535      * If the operation failed globally for this region, this exception is set
29536      * </pre>
29537      */
29538     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
29539     /**
29540      * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
29541      *
29542      * <pre>
29543      * If the operation failed globally for this region, this exception is set
29544      * </pre>
29545      */
29546     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
29547   }
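  /*
   * Illustrative sketch (assumption, not generated code): how a reader of a
   * RegionActionResult would typically use the accessors declared above to
   * tell a region-wide failure apart from per-action outcomes. "rar" is an
   * assumed RegionActionResult instance.
   *
   *   if (rar.hasException()) {
   *     // the whole RegionAction failed; inspect rar.getException()
   *   } else {
   *     for (int i = 0; i < rar.getResultOrExceptionCount(); i++) {
   *       ClientProtos.ResultOrException roe = rar.getResultOrException(i);
   *       // each entry carries either a Result or a per-action exception
   *     }
   *   }
   */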
29548   /**
29549    * Protobuf type {@code hbase.pb.RegionActionResult}
29550    *
29551    * <pre>
29552    **
29553    * The result of a RegionAction.
29554    * </pre>
29555    */
29556   public static final class RegionActionResult extends
29557       com.google.protobuf.GeneratedMessage
29558       implements RegionActionResultOrBuilder {
29559     // Use RegionActionResult.newBuilder() to construct.
29560     private RegionActionResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
29561       super(builder);
29562       this.unknownFields = builder.getUnknownFields();
29563     }
29564     private RegionActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
29565 
29566     private static final RegionActionResult defaultInstance;
29567     public static RegionActionResult getDefaultInstance() {
29568       return defaultInstance;
29569     }
29570 
29571     public RegionActionResult getDefaultInstanceForType() {
29572       return defaultInstance;
29573     }
29574 
29575     private final com.google.protobuf.UnknownFieldSet unknownFields;
29576     @java.lang.Override
29577     public final com.google.protobuf.UnknownFieldSet
29578         getUnknownFields() {
29579       return this.unknownFields;
29580     }
29581     private RegionActionResult(
29582         com.google.protobuf.CodedInputStream input,
29583         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29584         throws com.google.protobuf.InvalidProtocolBufferException {
29585       initFields();
29586       int mutable_bitField0_ = 0;
29587       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
29588           com.google.protobuf.UnknownFieldSet.newBuilder();
29589       try {
29590         boolean done = false;
29591         while (!done) {
29592           int tag = input.readTag();
29593           switch (tag) {
29594             case 0:
29595               done = true;
29596               break;
29597             default: {
29598               if (!parseUnknownField(input, unknownFields,
29599                                      extensionRegistry, tag)) {
29600                 done = true;
29601               }
29602               break;
29603             }
29604             case 10: {
29605               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
29606                 resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>();
29607                 mutable_bitField0_ |= 0x00000001;
29608               }
29609               resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry));
29610               break;
29611             }
29612             case 18: {
29613               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
29614               if (((bitField0_ & 0x00000001) == 0x00000001)) {
29615                 subBuilder = exception_.toBuilder();
29616               }
29617               exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
29618               if (subBuilder != null) {
29619                 subBuilder.mergeFrom(exception_);
29620                 exception_ = subBuilder.buildPartial();
29621               }
29622               bitField0_ |= 0x00000001;
29623               break;
29624             }
29625           }
29626         }
29627       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
29628         throw e.setUnfinishedMessage(this);
29629       } catch (java.io.IOException e) {
29630         throw new com.google.protobuf.InvalidProtocolBufferException(
29631             e.getMessage()).setUnfinishedMessage(this);
29632       } finally {
29633         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
29634           resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
29635         }
29636         this.unknownFields = unknownFields.build();
29637         makeExtensionsImmutable();
29638       }
29639     }
29640     public static final com.google.protobuf.Descriptors.Descriptor
29641         getDescriptor() {
29642       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor;
29643     }
29644 
29645     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
29646         internalGetFieldAccessorTable() {
29647       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable
29648           .ensureFieldAccessorsInitialized(
29649               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
29650     }
29651 
29652     public static com.google.protobuf.Parser<RegionActionResult> PARSER =
29653         new com.google.protobuf.AbstractParser<RegionActionResult>() {
29654       public RegionActionResult parsePartialFrom(
29655           com.google.protobuf.CodedInputStream input,
29656           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29657           throws com.google.protobuf.InvalidProtocolBufferException {
29658         return new RegionActionResult(input, extensionRegistry);
29659       }
29660     };
29661 
29662     @java.lang.Override
29663     public com.google.protobuf.Parser<RegionActionResult> getParserForType() {
29664       return PARSER;
29665     }
29666 
29667     private int bitField0_;
29668     // repeated .hbase.pb.ResultOrException resultOrException = 1;
29669     public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1;
29670     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_;
29671     /**
29672      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29673      */
29674     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
29675       return resultOrException_;
29676     }
29677     /**
29678      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29679      */
29680     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
29681         getResultOrExceptionOrBuilderList() {
29682       return resultOrException_;
29683     }
29684     /**
29685      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29686      */
29687     public int getResultOrExceptionCount() {
29688       return resultOrException_.size();
29689     }
29690     /**
29691      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29692      */
29693     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
29694       return resultOrException_.get(index);
29695     }
29696     /**
29697      * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
29698      */
29699     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
29700         int index) {
29701       return resultOrException_.get(index);
29702     }
29703 
29704     // optional .hbase.pb.NameBytesPair exception = 2;
29705     public static final int EXCEPTION_FIELD_NUMBER = 2;
29706     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
29707     /**
29708      * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
29709      *
29710      * <pre>
29711      * If the operation failed globally for this region, this exception is set
29712      * </pre>
29713      */
29714     public boolean hasException() {
29715       return ((bitField0_ & 0x00000001) == 0x00000001);
29716     }
29717     /**
29718      * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
29719      *
29720      * <pre>
29721      * If the operation failed globally for this region, this exception is set
29722      * </pre>
29723      */
29724     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
29725       return exception_;
29726     }
29727     /**
29728      * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
29729      *
29730      * <pre>
29731      * If the operation failed globally for this region, this exception is set
29732      * </pre>
29733      */
29734     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
29735       return exception_;
29736     }
29737 
29738     private void initFields() {
29739       resultOrException_ = java.util.Collections.emptyList();
29740       exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
29741     }
29742     private byte memoizedIsInitialized = -1;
29743     public final boolean isInitialized() {
29744       byte isInitialized = memoizedIsInitialized;
29745       if (isInitialized != -1) return isInitialized == 1;
29746 
29747       for (int i = 0; i < getResultOrExceptionCount(); i++) {
29748         if (!getResultOrException(i).isInitialized()) {
29749           memoizedIsInitialized = 0;
29750           return false;
29751         }
29752       }
29753       if (hasException()) {
29754         if (!getException().isInitialized()) {
29755           memoizedIsInitialized = 0;
29756           return false;
29757         }
29758       }
29759       memoizedIsInitialized = 1;
29760       return true;
29761     }
29762 
29763     public void writeTo(com.google.protobuf.CodedOutputStream output)
29764                         throws java.io.IOException {
29765       getSerializedSize();
29766       for (int i = 0; i < resultOrException_.size(); i++) {
29767         output.writeMessage(1, resultOrException_.get(i));
29768       }
29769       if (((bitField0_ & 0x00000001) == 0x00000001)) {
29770         output.writeMessage(2, exception_);
29771       }
29772       getUnknownFields().writeTo(output);
29773     }
29774 
29775     private int memoizedSerializedSize = -1;
29776     public int getSerializedSize() {
29777       int size = memoizedSerializedSize;
29778       if (size != -1) return size;
29779 
29780       size = 0;
29781       for (int i = 0; i < resultOrException_.size(); i++) {
29782         size += com.google.protobuf.CodedOutputStream
29783           .computeMessageSize(1, resultOrException_.get(i));
29784       }
29785       if (((bitField0_ & 0x00000001) == 0x00000001)) {
29786         size += com.google.protobuf.CodedOutputStream
29787           .computeMessageSize(2, exception_);
29788       }
29789       size += getUnknownFields().getSerializedSize();
29790       memoizedSerializedSize = size;
29791       return size;
29792     }
29793 
29794     private static final long serialVersionUID = 0L;
29795     @java.lang.Override
29796     protected java.lang.Object writeReplace()
29797         throws java.io.ObjectStreamException {
29798       return super.writeReplace();
29799     }
29800 
29801     @java.lang.Override
29802     public boolean equals(final java.lang.Object obj) {
29803       if (obj == this) {
29804         return true;
29805       }
29806       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)) {
29807         return super.equals(obj);
29808       }
29809       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) obj;
29810 
29811       boolean result = true;
29812       result = result && getResultOrExceptionList()
29813           .equals(other.getResultOrExceptionList());
29814       result = result && (hasException() == other.hasException());
29815       if (hasException()) {
29816         result = result && getException()
29817             .equals(other.getException());
29818       }
29819       result = result &&
29820           getUnknownFields().equals(other.getUnknownFields());
29821       return result;
29822     }
29823 
29824     private int memoizedHashCode = 0;
29825     @java.lang.Override
29826     public int hashCode() {
29827       if (memoizedHashCode != 0) {
29828         return memoizedHashCode;
29829       }
29830       int hash = 41;
29831       hash = (19 * hash) + getDescriptorForType().hashCode();
29832       if (getResultOrExceptionCount() > 0) {
29833         hash = (37 * hash) + RESULTOREXCEPTION_FIELD_NUMBER;
29834         hash = (53 * hash) + getResultOrExceptionList().hashCode();
29835       }
29836       if (hasException()) {
29837         hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
29838         hash = (53 * hash) + getException().hashCode();
29839       }
29840       hash = (29 * hash) + getUnknownFields().hashCode();
29841       memoizedHashCode = hash;
29842       return hash;
29843     }
29844 
29845     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
29846         com.google.protobuf.ByteString data)
29847         throws com.google.protobuf.InvalidProtocolBufferException {
29848       return PARSER.parseFrom(data);
29849     }
29850     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
29851         com.google.protobuf.ByteString data,
29852         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29853         throws com.google.protobuf.InvalidProtocolBufferException {
29854       return PARSER.parseFrom(data, extensionRegistry);
29855     }
29856     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(byte[] data)
29857         throws com.google.protobuf.InvalidProtocolBufferException {
29858       return PARSER.parseFrom(data);
29859     }
29860     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
29861         byte[] data,
29862         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29863         throws com.google.protobuf.InvalidProtocolBufferException {
29864       return PARSER.parseFrom(data, extensionRegistry);
29865     }
29866     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(java.io.InputStream input)
29867         throws java.io.IOException {
29868       return PARSER.parseFrom(input);
29869     }
29870     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
29871         java.io.InputStream input,
29872         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29873         throws java.io.IOException {
29874       return PARSER.parseFrom(input, extensionRegistry);
29875     }
29876     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(java.io.InputStream input)
29877         throws java.io.IOException {
29878       return PARSER.parseDelimitedFrom(input);
29879     }
29880     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(
29881         java.io.InputStream input,
29882         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29883         throws java.io.IOException {
29884       return PARSER.parseDelimitedFrom(input, extensionRegistry);
29885     }
29886     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
29887         com.google.protobuf.CodedInputStream input)
29888         throws java.io.IOException {
29889       return PARSER.parseFrom(input);
29890     }
29891     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
29892         com.google.protobuf.CodedInputStream input,
29893         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29894         throws java.io.IOException {
29895       return PARSER.parseFrom(input, extensionRegistry);
29896     }
29897 
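    /*
     * Illustrative sketch (not part of the generated file): deserializing a
     * RegionActionResult with the static parse helpers above. "data" is an
     * assumed byte[] holding a serialized message.
     *
     *   ClientProtos.RegionActionResult rar =
     *       ClientProtos.RegionActionResult.parseFrom(data);
     *   if (!rar.isInitialized()) {
     *     // a nested ResultOrException or exception is missing required fields
     *   }
     */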
29898     public static Builder newBuilder() { return Builder.create(); }
29899     public Builder newBuilderForType() { return newBuilder(); }
29900     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult prototype) {
29901       return newBuilder().mergeFrom(prototype);
29902     }
29903     public Builder toBuilder() { return newBuilder(this); }
29904 
29905     @java.lang.Override
29906     protected Builder newBuilderForType(
29907         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
29908       Builder builder = new Builder(parent);
29909       return builder;
29910     }
29911     /**
29912      * Protobuf type {@code hbase.pb.RegionActionResult}
29913      *
29914      * <pre>
29915      **
29916      * The result of a RegionAction.
29917      * </pre>
29918      */
29919     public static final class Builder extends
29920         com.google.protobuf.GeneratedMessage.Builder<Builder>
29921        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder {
29922       public static final com.google.protobuf.Descriptors.Descriptor
29923           getDescriptor() {
29924         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor;
29925       }
29926 
29927       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
29928           internalGetFieldAccessorTable() {
29929         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable
29930             .ensureFieldAccessorsInitialized(
29931                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
29932       }
29933 
29934       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.newBuilder()
29935       private Builder() {
29936         maybeForceBuilderInitialization();
29937       }
29938 
29939       private Builder(
29940           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
29941         super(parent);
29942         maybeForceBuilderInitialization();
29943       }
29944       private void maybeForceBuilderInitialization() {
29945         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
29946           getResultOrExceptionFieldBuilder();
29947           getExceptionFieldBuilder();
29948         }
29949       }
29950       private static Builder create() {
29951         return new Builder();
29952       }
29953 
29954       public Builder clear() {
29955         super.clear();
29956         if (resultOrExceptionBuilder_ == null) {
29957           resultOrException_ = java.util.Collections.emptyList();
29958           bitField0_ = (bitField0_ & ~0x00000001);
29959         } else {
29960           resultOrExceptionBuilder_.clear();
29961         }
29962         if (exceptionBuilder_ == null) {
29963           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
29964         } else {
29965           exceptionBuilder_.clear();
29966         }
29967         bitField0_ = (bitField0_ & ~0x00000002);
29968         return this;
29969       }
29970 
29971       public Builder clone() {
29972         return create().mergeFrom(buildPartial());
29973       }
29974 
29975       public com.google.protobuf.Descriptors.Descriptor
29976           getDescriptorForType() {
29977         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor;
29978       }
29979 
29980       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() {
29981         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance();
29982       }
29983 
29984       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult build() {
29985         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = buildPartial();
29986         if (!result.isInitialized()) {
29987           throw newUninitializedMessageException(result);
29988         }
29989         return result;
29990       }
29991 
29992       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult buildPartial() {
29993         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult(this);
29994         int from_bitField0_ = bitField0_;
29995         int to_bitField0_ = 0;
29996         if (resultOrExceptionBuilder_ == null) {
29997           if (((bitField0_ & 0x00000001) == 0x00000001)) {
29998             resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
29999             bitField0_ = (bitField0_ & ~0x00000001);
30000           }
30001           result.resultOrException_ = resultOrException_;
30002         } else {
30003           result.resultOrException_ = resultOrExceptionBuilder_.build();
30004         }
30005         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
30006           to_bitField0_ |= 0x00000001;
30007         }
30008         if (exceptionBuilder_ == null) {
30009           result.exception_ = exception_;
30010         } else {
30011           result.exception_ = exceptionBuilder_.build();
30012         }
30013         result.bitField0_ = to_bitField0_;
30014         onBuilt();
30015         return result;
30016       }
30017 
30018       public Builder mergeFrom(com.google.protobuf.Message other) {
30019         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) {
30020           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)other);
30021         } else {
30022           super.mergeFrom(other);
30023           return this;
30024         }
30025       }
30026 
30027       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other) {
30028         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()) return this;
30029         if (resultOrExceptionBuilder_ == null) {
30030           if (!other.resultOrException_.isEmpty()) {
30031             if (resultOrException_.isEmpty()) {
30032               resultOrException_ = other.resultOrException_;
30033               bitField0_ = (bitField0_ & ~0x00000001);
30034             } else {
30035               ensureResultOrExceptionIsMutable();
30036               resultOrException_.addAll(other.resultOrException_);
30037             }
30038             onChanged();
30039           }
30040         } else {
30041           if (!other.resultOrException_.isEmpty()) {
30042             if (resultOrExceptionBuilder_.isEmpty()) {
30043               resultOrExceptionBuilder_.dispose();
30044               resultOrExceptionBuilder_ = null;
30045               resultOrException_ = other.resultOrException_;
30046               bitField0_ = (bitField0_ & ~0x00000001);
30047               resultOrExceptionBuilder_ = 
30048                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
30049                    getResultOrExceptionFieldBuilder() : null;
30050             } else {
30051               resultOrExceptionBuilder_.addAllMessages(other.resultOrException_);
30052             }
30053           }
30054         }
30055         if (other.hasException()) {
30056           mergeException(other.getException());
30057         }
30058         this.mergeUnknownFields(other.getUnknownFields());
30059         return this;
30060       }
30061 
30062       public final boolean isInitialized() {
30063         for (int i = 0; i < getResultOrExceptionCount(); i++) {
30064           if (!getResultOrException(i).isInitialized()) {
30065             
30066             return false;
30067           }
30068         }
30069         if (hasException()) {
30070           if (!getException().isInitialized()) {
30071             
30072             return false;
30073           }
30074         }
30075         return true;
30076       }
30077 
30078       public Builder mergeFrom(
30079           com.google.protobuf.CodedInputStream input,
30080           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30081           throws java.io.IOException {
30082         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parsedMessage = null;
30083         try {
30084           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
30085         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
30086           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) e.getUnfinishedMessage();
30087           throw e;
30088         } finally {
30089           if (parsedMessage != null) {
30090             mergeFrom(parsedMessage);
30091           }
30092         }
30093         return this;
30094       }
30095       private int bitField0_;
30096 
30097       // repeated .hbase.pb.ResultOrException resultOrException = 1;
30098       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_ =
30099         java.util.Collections.emptyList();
30100       private void ensureResultOrExceptionIsMutable() {
30101         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
30102           resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>(resultOrException_);
30103           bitField0_ |= 0x00000001;
30104         }
30105       }
30106 
30107       private com.google.protobuf.RepeatedFieldBuilder<
30108           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_;
30109 
30110       /**
30111        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30112        */
30113       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
30114         if (resultOrExceptionBuilder_ == null) {
30115           return java.util.Collections.unmodifiableList(resultOrException_);
30116         } else {
30117           return resultOrExceptionBuilder_.getMessageList();
30118         }
30119       }
30120       /**
30121        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30122        */
30123       public int getResultOrExceptionCount() {
30124         if (resultOrExceptionBuilder_ == null) {
30125           return resultOrException_.size();
30126         } else {
30127           return resultOrExceptionBuilder_.getCount();
30128         }
30129       }
30130       /**
30131        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30132        */
30133       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
30134         if (resultOrExceptionBuilder_ == null) {
30135           return resultOrException_.get(index);
30136         } else {
30137           return resultOrExceptionBuilder_.getMessage(index);
30138         }
30139       }
30140       /**
30141        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30142        */
30143       public Builder setResultOrException(
30144           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
30145         if (resultOrExceptionBuilder_ == null) {
30146           if (value == null) {
30147             throw new NullPointerException();
30148           }
30149           ensureResultOrExceptionIsMutable();
30150           resultOrException_.set(index, value);
30151           onChanged();
30152         } else {
30153           resultOrExceptionBuilder_.setMessage(index, value);
30154         }
30155         return this;
30156       }
30157       /**
30158        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30159        */
30160       public Builder setResultOrException(
30161           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
30162         if (resultOrExceptionBuilder_ == null) {
30163           ensureResultOrExceptionIsMutable();
30164           resultOrException_.set(index, builderForValue.build());
30165           onChanged();
30166         } else {
30167           resultOrExceptionBuilder_.setMessage(index, builderForValue.build());
30168         }
30169         return this;
30170       }
30171       /**
30172        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30173        */
30174       public Builder addResultOrException(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
30175         if (resultOrExceptionBuilder_ == null) {
30176           if (value == null) {
30177             throw new NullPointerException();
30178           }
30179           ensureResultOrExceptionIsMutable();
30180           resultOrException_.add(value);
30181           onChanged();
30182         } else {
30183           resultOrExceptionBuilder_.addMessage(value);
30184         }
30185         return this;
30186       }
30187       /**
30188        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30189        */
30190       public Builder addResultOrException(
30191           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
30192         if (resultOrExceptionBuilder_ == null) {
30193           if (value == null) {
30194             throw new NullPointerException();
30195           }
30196           ensureResultOrExceptionIsMutable();
30197           resultOrException_.add(index, value);
30198           onChanged();
30199         } else {
30200           resultOrExceptionBuilder_.addMessage(index, value);
30201         }
30202         return this;
30203       }
30204       /**
30205        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30206        */
30207       public Builder addResultOrException(
30208           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
30209         if (resultOrExceptionBuilder_ == null) {
30210           ensureResultOrExceptionIsMutable();
30211           resultOrException_.add(builderForValue.build());
30212           onChanged();
30213         } else {
30214           resultOrExceptionBuilder_.addMessage(builderForValue.build());
30215         }
30216         return this;
30217       }
30218       /**
30219        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30220        */
30221       public Builder addResultOrException(
30222           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
30223         if (resultOrExceptionBuilder_ == null) {
30224           ensureResultOrExceptionIsMutable();
30225           resultOrException_.add(index, builderForValue.build());
30226           onChanged();
30227         } else {
30228           resultOrExceptionBuilder_.addMessage(index, builderForValue.build());
30229         }
30230         return this;
30231       }
30232       /**
30233        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30234        */
30235       public Builder addAllResultOrException(
30236           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> values) {
30237         if (resultOrExceptionBuilder_ == null) {
30238           ensureResultOrExceptionIsMutable();
30239           super.addAll(values, resultOrException_);
30240           onChanged();
30241         } else {
30242           resultOrExceptionBuilder_.addAllMessages(values);
30243         }
30244         return this;
30245       }
30246       /**
30247        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30248        */
30249       public Builder clearResultOrException() {
30250         if (resultOrExceptionBuilder_ == null) {
30251           resultOrException_ = java.util.Collections.emptyList();
30252           bitField0_ = (bitField0_ & ~0x00000001);
30253           onChanged();
30254         } else {
30255           resultOrExceptionBuilder_.clear();
30256         }
30257         return this;
30258       }
30259       /**
30260        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30261        */
30262       public Builder removeResultOrException(int index) {
30263         if (resultOrExceptionBuilder_ == null) {
30264           ensureResultOrExceptionIsMutable();
30265           resultOrException_.remove(index);
30266           onChanged();
30267         } else {
30268           resultOrExceptionBuilder_.remove(index);
30269         }
30270         return this;
30271       }
30272       /**
30273        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30274        */
30275       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder getResultOrExceptionBuilder(
30276           int index) {
30277         return getResultOrExceptionFieldBuilder().getBuilder(index);
30278       }
30279       /**
30280        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30281        */
30282       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
30283           int index) {
30284         if (resultOrExceptionBuilder_ == null) {
30285           return resultOrException_.get(index);  } else {
30286           return resultOrExceptionBuilder_.getMessageOrBuilder(index);
30287         }
30288       }
30289       /**
30290        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30291        */
30292       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
30293            getResultOrExceptionOrBuilderList() {
30294         if (resultOrExceptionBuilder_ != null) {
30295           return resultOrExceptionBuilder_.getMessageOrBuilderList();
30296         } else {
30297           return java.util.Collections.unmodifiableList(resultOrException_);
30298         }
30299       }
30300       /**
30301        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30302        */
30303       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder() {
30304         return getResultOrExceptionFieldBuilder().addBuilder(
30305             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
30306       }
30307       /**
30308        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30309        */
30310       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder(
30311           int index) {
30312         return getResultOrExceptionFieldBuilder().addBuilder(
30313             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
30314       }
30315       /**
30316        * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
30317        */
30318       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder> 
30319            getResultOrExceptionBuilderList() {
30320         return getResultOrExceptionFieldBuilder().getBuilderList();
30321       }
30322       private com.google.protobuf.RepeatedFieldBuilder<
30323           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
30324           getResultOrExceptionFieldBuilder() {
30325         if (resultOrExceptionBuilder_ == null) {
30326           resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
30327               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>(
30328                   resultOrException_,
30329                   ((bitField0_ & 0x00000001) == 0x00000001),
30330                   getParentForChildren(),
30331                   isClean());
30332           resultOrException_ = null;
30333         }
30334         return resultOrExceptionBuilder_;
30335       }
30336 
30337       // optional .hbase.pb.NameBytesPair exception = 2;
30338       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
30339       private com.google.protobuf.SingleFieldBuilder<
30340           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
30341       /**
30342        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30343        *
30344        * <pre>
30345        * If the operation failed globally for this region, this exception is set
30346        * </pre>
30347        */
30348       public boolean hasException() {
30349         return ((bitField0_ & 0x00000002) == 0x00000002);
30350       }
30351       /**
30352        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30353        *
30354        * <pre>
30355        * If the operation failed globally for this region, this exception is set
30356        * </pre>
30357        */
30358       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
30359         if (exceptionBuilder_ == null) {
30360           return exception_;
30361         } else {
30362           return exceptionBuilder_.getMessage();
30363         }
30364       }
30365       /**
30366        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30367        *
30368        * <pre>
30369        * If the operation failed globally for this region, this exception is set
30370        * </pre>
30371        */
30372       public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
30373         if (exceptionBuilder_ == null) {
30374           if (value == null) {
30375             throw new NullPointerException();
30376           }
30377           exception_ = value;
30378           onChanged();
30379         } else {
30380           exceptionBuilder_.setMessage(value);
30381         }
30382         bitField0_ |= 0x00000002;
30383         return this;
30384       }
30385       /**
30386        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30387        *
30388        * <pre>
30389        * If the operation failed globally for this region, this exception is set
30390        * </pre>
30391        */
30392       public Builder setException(
30393           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
30394         if (exceptionBuilder_ == null) {
30395           exception_ = builderForValue.build();
30396           onChanged();
30397         } else {
30398           exceptionBuilder_.setMessage(builderForValue.build());
30399         }
30400         bitField0_ |= 0x00000002;
30401         return this;
30402       }
30403       /**
30404        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30405        *
30406        * <pre>
30407        * If the operation failed globally for this region, this exception is set
30408        * </pre>
30409        */
30410       public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
30411         if (exceptionBuilder_ == null) {
30412           if (((bitField0_ & 0x00000002) == 0x00000002) &&
30413               exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
30414             exception_ =
30415               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
30416           } else {
30417             exception_ = value;
30418           }
30419           onChanged();
30420         } else {
30421           exceptionBuilder_.mergeFrom(value);
30422         }
30423         bitField0_ |= 0x00000002;
30424         return this;
30425       }
30426       /**
30427        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30428        *
30429        * <pre>
30430        * If the operation failed globally for this region, this exception is set
30431        * </pre>
30432        */
30433       public Builder clearException() {
30434         if (exceptionBuilder_ == null) {
30435           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
30436           onChanged();
30437         } else {
30438           exceptionBuilder_.clear();
30439         }
30440         bitField0_ = (bitField0_ & ~0x00000002);
30441         return this;
30442       }
30443       /**
30444        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30445        *
30446        * <pre>
30447        * If the operation failed globally for this region, this exception is set
30448        * </pre>
30449        */
30450       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
30451         bitField0_ |= 0x00000002;
30452         onChanged();
30453         return getExceptionFieldBuilder().getBuilder();
30454       }
30455       /**
30456        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30457        *
30458        * <pre>
30459        * If the operation failed globally for this region, this exception is set
30460        * </pre>
30461        */
30462       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
30463         if (exceptionBuilder_ != null) {
30464           return exceptionBuilder_.getMessageOrBuilder();
30465         } else {
30466           return exception_;
30467         }
30468       }
30469       /**
30470        * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
30471        *
30472        * <pre>
30473        * If the operation failed globally for this region, this exception is set
30474        * </pre>
30475        */
30476       private com.google.protobuf.SingleFieldBuilder<
30477           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
30478           getExceptionFieldBuilder() {
30479         if (exceptionBuilder_ == null) {
30480           exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
30481               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
30482                   exception_,
30483                   getParentForChildren(),
30484                   isClean());
30485           exception_ = null;
30486         }
30487         return exceptionBuilder_;
30488       }
30489 
30490       // @@protoc_insertion_point(builder_scope:hbase.pb.RegionActionResult)
30491     }
30492 
30493     static {
30494       defaultInstance = new RegionActionResult(true);
30495       defaultInstance.initFields();
30496     }
30497 
30498     // @@protoc_insertion_point(class_scope:hbase.pb.RegionActionResult)
30499   }
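  /*
   * A minimal sketch of how a caller might read the RegionActionResult defined above:
   * a region-wide failure is reported through the optional exception field, while
   * per-operation outcomes come back in resultOrException, in the same order as the
   * operations that were submitted. Variable names below are illustrative only.
   *
   *   ClientProtos.RegionActionResult rar = ...; // e.g. one entry of a MultiResponse
   *   if (rar.hasException()) {
   *     // the whole RegionAction failed; the NameBytesPair carries the error details
   *     HBaseProtos.NameBytesPair cause = rar.getException();
   *   } else {
   *     for (ClientProtos.ResultOrException roe : rar.getResultOrExceptionList()) {
   *       // each entry corresponds, by position, to one action in the request
   *     }
   *   }
   */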
30500 
30501   public interface MultiRequestOrBuilder
30502       extends com.google.protobuf.MessageOrBuilder {
30503 
30504     // repeated .hbase.pb.RegionAction regionAction = 1;
30505     /**
30506      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30507      */
30508     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> 
30509         getRegionActionList();
30510     /**
30511      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30512      */
30513     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index);
30514     /**
30515      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30516      */
30517     int getRegionActionCount();
30518     /**
30519      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30520      */
30521     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
30522         getRegionActionOrBuilderList();
30523     /**
30524      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30525      */
30526     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
30527         int index);
30528 
30529     // optional uint64 nonceGroup = 2;
30530     /**
30531      * <code>optional uint64 nonceGroup = 2;</code>
30532      */
30533     boolean hasNonceGroup();
30534     /**
30535      * <code>optional uint64 nonceGroup = 2;</code>
30536      */
30537     long getNonceGroup();
30538 
30539     // optional .hbase.pb.Condition condition = 3;
30540     /**
30541      * <code>optional .hbase.pb.Condition condition = 3;</code>
30542      */
30543     boolean hasCondition();
30544     /**
30545      * <code>optional .hbase.pb.Condition condition = 3;</code>
30546      */
30547     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
30548     /**
30549      * <code>optional .hbase.pb.Condition condition = 3;</code>
30550      */
30551     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
30552   }
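  /*
   * A minimal sketch of how the MultiRequest described below is typically assembled
   * with its generated Builder; the RegionAction values are assumed to have been
   * built elsewhere. Because matching is positional, the i-th RegionActionResult in
   * the MultiResponse answers the i-th RegionAction added here.
   *
   *   ClientProtos.RegionAction first = ...;   // assumed prebuilt
   *   ClientProtos.RegionAction second = ...;  // assumed prebuilt
   *   ClientProtos.MultiRequest request = ClientProtos.MultiRequest.newBuilder()
   *       .addRegionAction(first)
   *       .addRegionAction(second)
   *       .setNonceGroup(0L)                   // optional
   *       .build();
   */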
30553   /**
30554    * Protobuf type {@code hbase.pb.MultiRequest}
30555    *
30556    * <pre>
30557    **
30558    * Execute a list of actions on a given region in order.
30559    * Nothing prevents a request from containing several RegionActions for the same region.
30560    * For this reason, the matching between the MultiRequest and the MultiResponse is done
30561    *  not by the region specifier but by position: the order of the RegionActionResults
30562    *  mirrors the order of the RegionActions.
30563    * </pre>
30564    */
30565   public static final class MultiRequest extends
30566       com.google.protobuf.GeneratedMessage
30567       implements MultiRequestOrBuilder {
30568     // Use MultiRequest.newBuilder() to construct.
30569     private MultiRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
30570       super(builder);
30571       this.unknownFields = builder.getUnknownFields();
30572     }
30573     private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
30574 
30575     private static final MultiRequest defaultInstance;
30576     public static MultiRequest getDefaultInstance() {
30577       return defaultInstance;
30578     }
30579 
30580     public MultiRequest getDefaultInstanceForType() {
30581       return defaultInstance;
30582     }
30583 
30584     private final com.google.protobuf.UnknownFieldSet unknownFields;
30585     @java.lang.Override
30586     public final com.google.protobuf.UnknownFieldSet
30587         getUnknownFields() {
30588       return this.unknownFields;
30589     }
30590     private MultiRequest(
30591         com.google.protobuf.CodedInputStream input,
30592         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30593         throws com.google.protobuf.InvalidProtocolBufferException {
30594       initFields();
30595       int mutable_bitField0_ = 0;
30596       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
30597           com.google.protobuf.UnknownFieldSet.newBuilder();
30598       try {
30599         boolean done = false;
30600         while (!done) {
30601           int tag = input.readTag();
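          // Each tag is (field number << 3) | wire type: 10 = regionAction (field 1,
          // length-delimited), 16 = nonceGroup (field 2, varint), 26 = condition
          // (field 3, length-delimited); a tag of 0 means end of input.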
30602           switch (tag) {
30603             case 0:
30604               done = true;
30605               break;
30606             default: {
30607               if (!parseUnknownField(input, unknownFields,
30608                                      extensionRegistry, tag)) {
30609                 done = true;
30610               }
30611               break;
30612             }
30613             case 10: {
30614               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
30615                 regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>();
30616                 mutable_bitField0_ |= 0x00000001;
30617               }
30618               regionAction_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry));
30619               break;
30620             }
30621             case 16: {
30622               bitField0_ |= 0x00000001;
30623               nonceGroup_ = input.readUInt64();
30624               break;
30625             }
30626             case 26: {
30627               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
30628               if (((bitField0_ & 0x00000002) == 0x00000002)) {
30629                 subBuilder = condition_.toBuilder();
30630               }
30631               condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
30632               if (subBuilder != null) {
30633                 subBuilder.mergeFrom(condition_);
30634                 condition_ = subBuilder.buildPartial();
30635               }
30636               bitField0_ |= 0x00000002;
30637               break;
30638             }
30639           }
30640         }
30641       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
30642         throw e.setUnfinishedMessage(this);
30643       } catch (java.io.IOException e) {
30644         throw new com.google.protobuf.InvalidProtocolBufferException(
30645             e.getMessage()).setUnfinishedMessage(this);
30646       } finally {
30647         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
30648           regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
30649         }
30650         this.unknownFields = unknownFields.build();
30651         makeExtensionsImmutable();
30652       }
30653     }
30654     public static final com.google.protobuf.Descriptors.Descriptor
30655         getDescriptor() {
30656       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor;
30657     }
30658 
30659     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
30660         internalGetFieldAccessorTable() {
30661       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable
30662           .ensureFieldAccessorsInitialized(
30663               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
30664     }
30665 
30666     public static com.google.protobuf.Parser<MultiRequest> PARSER =
30667         new com.google.protobuf.AbstractParser<MultiRequest>() {
30668       public MultiRequest parsePartialFrom(
30669           com.google.protobuf.CodedInputStream input,
30670           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30671           throws com.google.protobuf.InvalidProtocolBufferException {
30672         return new MultiRequest(input, extensionRegistry);
30673       }
30674     };
30675 
30676     @java.lang.Override
30677     public com.google.protobuf.Parser<MultiRequest> getParserForType() {
30678       return PARSER;
30679     }
30680 
30681     private int bitField0_;
30682     // repeated .hbase.pb.RegionAction regionAction = 1;
30683     public static final int REGIONACTION_FIELD_NUMBER = 1;
30684     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_;
30685     /**
30686      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30687      */
30688     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
30689       return regionAction_;
30690     }
30691     /**
30692      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30693      */
30694     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
30695         getRegionActionOrBuilderList() {
30696       return regionAction_;
30697     }
30698     /**
30699      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30700      */
30701     public int getRegionActionCount() {
30702       return regionAction_.size();
30703     }
30704     /**
30705      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30706      */
30707     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
30708       return regionAction_.get(index);
30709     }
30710     /**
30711      * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
30712      */
30713     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
30714         int index) {
30715       return regionAction_.get(index);
30716     }
30717 
30718     // optional uint64 nonceGroup = 2;
30719     public static final int NONCEGROUP_FIELD_NUMBER = 2;
30720     private long nonceGroup_;
30721     /**
30722      * <code>optional uint64 nonceGroup = 2;</code>
30723      */
30724     public boolean hasNonceGroup() {
30725       return ((bitField0_ & 0x00000001) == 0x00000001);
30726     }
30727     /**
30728      * <code>optional uint64 nonceGroup = 2;</code>
30729      */
30730     public long getNonceGroup() {
30731       return nonceGroup_;
30732     }
30733 
30734     // optional .hbase.pb.Condition condition = 3;
30735     public static final int CONDITION_FIELD_NUMBER = 3;
30736     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
30737     /**
30738      * <code>optional .hbase.pb.Condition condition = 3;</code>
30739      */
30740     public boolean hasCondition() {
30741       return ((bitField0_ & 0x00000002) == 0x00000002);
30742     }
30743     /**
30744      * <code>optional .hbase.pb.Condition condition = 3;</code>
30745      */
30746     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
30747       return condition_;
30748     }
30749     /**
30750      * <code>optional .hbase.pb.Condition condition = 3;</code>
30751      */
30752     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
30753       return condition_;
30754     }
30755 
30756     private void initFields() {
30757       regionAction_ = java.util.Collections.emptyList();
30758       nonceGroup_ = 0L;
30759       condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
30760     }
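    // Caches the isInitialized() answer: -1 = not yet computed, 0 = known
    // uninitialized, 1 = known initialized.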
30761     private byte memoizedIsInitialized = -1;
30762     public final boolean isInitialized() {
30763       byte isInitialized = memoizedIsInitialized;
30764       if (isInitialized != -1) return isInitialized == 1;
30765 
30766       for (int i = 0; i < getRegionActionCount(); i++) {
30767         if (!getRegionAction(i).isInitialized()) {
30768           memoizedIsInitialized = 0;
30769           return false;
30770         }
30771       }
30772       if (hasCondition()) {
30773         if (!getCondition().isInitialized()) {
30774           memoizedIsInitialized = 0;
30775           return false;
30776         }
30777       }
30778       memoizedIsInitialized = 1;
30779       return true;
30780     }
30781 
30782     public void writeTo(com.google.protobuf.CodedOutputStream output)
30783                         throws java.io.IOException {
30784       getSerializedSize();
30785       for (int i = 0; i < regionAction_.size(); i++) {
30786         output.writeMessage(1, regionAction_.get(i));
30787       }
30788       if (((bitField0_ & 0x00000001) == 0x00000001)) {
30789         output.writeUInt64(2, nonceGroup_);
30790       }
30791       if (((bitField0_ & 0x00000002) == 0x00000002)) {
30792         output.writeMessage(3, condition_);
30793       }
30794       getUnknownFields().writeTo(output);
30795     }
30796 
30797     private int memoizedSerializedSize = -1;
30798     public int getSerializedSize() {
30799       int size = memoizedSerializedSize;
30800       if (size != -1) return size;
30801 
30802       size = 0;
30803       for (int i = 0; i < regionAction_.size(); i++) {
30804         size += com.google.protobuf.CodedOutputStream
30805           .computeMessageSize(1, regionAction_.get(i));
30806       }
30807       if (((bitField0_ & 0x00000001) == 0x00000001)) {
30808         size += com.google.protobuf.CodedOutputStream
30809           .computeUInt64Size(2, nonceGroup_);
30810       }
30811       if (((bitField0_ & 0x00000002) == 0x00000002)) {
30812         size += com.google.protobuf.CodedOutputStream
30813           .computeMessageSize(3, condition_);
30814       }
30815       size += getUnknownFields().getSerializedSize();
30816       memoizedSerializedSize = size;
30817       return size;
30818     }
30819 
30820     private static final long serialVersionUID = 0L;
30821     @java.lang.Override
30822     protected java.lang.Object writeReplace()
30823         throws java.io.ObjectStreamException {
30824       return super.writeReplace();
30825     }
30826 
30827     @java.lang.Override
30828     public boolean equals(final java.lang.Object obj) {
30829       if (obj == this) {
30830        return true;
30831       }
30832       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) {
30833         return super.equals(obj);
30834       }
30835       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj;
30836 
30837       boolean result = true;
30838       result = result && getRegionActionList()
30839           .equals(other.getRegionActionList());
30840       result = result && (hasNonceGroup() == other.hasNonceGroup());
30841       if (hasNonceGroup()) {
30842         result = result && (getNonceGroup()
30843             == other.getNonceGroup());
30844       }
30845       result = result && (hasCondition() == other.hasCondition());
30846       if (hasCondition()) {
30847         result = result && getCondition()
30848             .equals(other.getCondition());
30849       }
30850       result = result &&
30851           getUnknownFields().equals(other.getUnknownFields());
30852       return result;
30853     }
30854 
30855     private int memoizedHashCode = 0;
30856     @java.lang.Override
30857     public int hashCode() {
30858       if (memoizedHashCode != 0) {
30859         return memoizedHashCode;
30860       }
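      // Generated hashing scheme: start from 41, mix in the descriptor, then for each
      // present field fold in its field number (*37) and its value's hash (*53).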
30861       int hash = 41;
30862       hash = (19 * hash) + getDescriptorForType().hashCode();
30863       if (getRegionActionCount() > 0) {
30864         hash = (37 * hash) + REGIONACTION_FIELD_NUMBER;
30865         hash = (53 * hash) + getRegionActionList().hashCode();
30866       }
30867       if (hasNonceGroup()) {
30868         hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
30869         hash = (53 * hash) + hashLong(getNonceGroup());
30870       }
30871       if (hasCondition()) {
30872         hash = (37 * hash) + CONDITION_FIELD_NUMBER;
30873         hash = (53 * hash) + getCondition().hashCode();
30874       }
30875       hash = (29 * hash) + getUnknownFields().hashCode();
30876       memoizedHashCode = hash;
30877       return hash;
30878     }
30879 
30880     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
30881         com.google.protobuf.ByteString data)
30882         throws com.google.protobuf.InvalidProtocolBufferException {
30883       return PARSER.parseFrom(data);
30884     }
30885     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
30886         com.google.protobuf.ByteString data,
30887         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30888         throws com.google.protobuf.InvalidProtocolBufferException {
30889       return PARSER.parseFrom(data, extensionRegistry);
30890     }
30891     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data)
30892         throws com.google.protobuf.InvalidProtocolBufferException {
30893       return PARSER.parseFrom(data);
30894     }
30895     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
30896         byte[] data,
30897         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30898         throws com.google.protobuf.InvalidProtocolBufferException {
30899       return PARSER.parseFrom(data, extensionRegistry);
30900     }
30901     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input)
30902         throws java.io.IOException {
30903       return PARSER.parseFrom(input);
30904     }
30905     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
30906         java.io.InputStream input,
30907         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30908         throws java.io.IOException {
30909       return PARSER.parseFrom(input, extensionRegistry);
30910     }
30911     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input)
30912         throws java.io.IOException {
30913       return PARSER.parseDelimitedFrom(input);
30914     }
30915     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(
30916         java.io.InputStream input,
30917         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30918         throws java.io.IOException {
30919       return PARSER.parseDelimitedFrom(input, extensionRegistry);
30920     }
30921     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
30922         com.google.protobuf.CodedInputStream input)
30923         throws java.io.IOException {
30924       return PARSER.parseFrom(input);
30925     }
30926     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
30927         com.google.protobuf.CodedInputStream input,
30928         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30929         throws java.io.IOException {
30930       return PARSER.parseFrom(input, extensionRegistry);
30931     }
30932 
30933     public static Builder newBuilder() { return Builder.create(); }
30934     public Builder newBuilderForType() { return newBuilder(); }
30935     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) {
30936       return newBuilder().mergeFrom(prototype);
30937     }
30938     public Builder toBuilder() { return newBuilder(this); }
30939 
30940     @java.lang.Override
30941     protected Builder newBuilderForType(
30942         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
30943       Builder builder = new Builder(parent);
30944       return builder;
30945     }
30946     /**
30947      * Protobuf type {@code hbase.pb.MultiRequest}
30948      *
30949      * <pre>
30950      **
30951      * Execute a list of actions on a given region in order.
30952      * Nothing prevents a request from containing several RegionActions for the same region.
30953      * For this reason, the matching between the MultiRequest and the MultiResponse is done
30954      *  not by the region specifier but by position: the order of the RegionActionResults
30955      *  mirrors the order of the RegionActions.
30956      * </pre>
30957      */
30958     public static final class Builder extends
30959         com.google.protobuf.GeneratedMessage.Builder<Builder>
30960        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder {
30961       public static final com.google.protobuf.Descriptors.Descriptor
30962           getDescriptor() {
30963         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor;
30964       }
30965 
30966       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
30967           internalGetFieldAccessorTable() {
30968         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable
30969             .ensureFieldAccessorsInitialized(
30970                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
30971       }
30972 
30973       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder()
30974       private Builder() {
30975         maybeForceBuilderInitialization();
30976       }
30977 
30978       private Builder(
30979           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
30980         super(parent);
30981         maybeForceBuilderInitialization();
30982       }
30983       private void maybeForceBuilderInitialization() {
30984         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
30985           getRegionActionFieldBuilder();
30986           getConditionFieldBuilder();
30987         }
30988       }
30989       private static Builder create() {
30990         return new Builder();
30991       }
30992 
30993       public Builder clear() {
30994         super.clear();
30995         if (regionActionBuilder_ == null) {
30996           regionAction_ = java.util.Collections.emptyList();
30997           bitField0_ = (bitField0_ & ~0x00000001);
30998         } else {
30999           regionActionBuilder_.clear();
31000         }
31001         nonceGroup_ = 0L;
31002         bitField0_ = (bitField0_ & ~0x00000002);
31003         if (conditionBuilder_ == null) {
31004           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
31005         } else {
31006           conditionBuilder_.clear();
31007         }
31008         bitField0_ = (bitField0_ & ~0x00000004);
31009         return this;
31010       }
31011 
31012       public Builder clone() {
31013         return create().mergeFrom(buildPartial());
31014       }
31015 
31016       public com.google.protobuf.Descriptors.Descriptor
31017           getDescriptorForType() {
31018         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor;
31019       }
31020 
31021       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() {
31022         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
31023       }
31024 
31025       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() {
31026         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial();
31027         if (!result.isInitialized()) {
31028           throw newUninitializedMessageException(result);
31029         }
31030         return result;
31031       }
31032 
31033       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() {
31034         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this);
31035         int from_bitField0_ = bitField0_;
31036         int to_bitField0_ = 0;
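        // Bit 0 of the builder's bitField0_ tracks the repeated regionAction list, so
        // the presence bits for nonceGroup and condition shift down one position when
        // copied into the built message's bitField0_.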
31037         if (regionActionBuilder_ == null) {
31038           if (((bitField0_ & 0x00000001) == 0x00000001)) {
31039             regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
31040             bitField0_ = (bitField0_ & ~0x00000001);
31041           }
31042           result.regionAction_ = regionAction_;
31043         } else {
31044           result.regionAction_ = regionActionBuilder_.build();
31045         }
31046         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
31047           to_bitField0_ |= 0x00000001;
31048         }
31049         result.nonceGroup_ = nonceGroup_;
31050         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
31051           to_bitField0_ |= 0x00000002;
31052         }
31053         if (conditionBuilder_ == null) {
31054           result.condition_ = condition_;
31055         } else {
31056           result.condition_ = conditionBuilder_.build();
31057         }
31058         result.bitField0_ = to_bitField0_;
31059         onBuilt();
31060         return result;
31061       }
31062 
31063       public Builder mergeFrom(com.google.protobuf.Message other) {
31064         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) {
31065           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other);
31066         } else {
31067           super.mergeFrom(other);
31068           return this;
31069         }
31070       }
31071 
31072       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) {
31073         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this;
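        // Repeated fields merge by concatenation: if this builder has no regionAction
        // entries yet, the other message's immutable list is adopted directly;
        // otherwise the other entries are appended to a mutable copy.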
31074         if (regionActionBuilder_ == null) {
31075           if (!other.regionAction_.isEmpty()) {
31076             if (regionAction_.isEmpty()) {
31077               regionAction_ = other.regionAction_;
31078               bitField0_ = (bitField0_ & ~0x00000001);
31079             } else {
31080               ensureRegionActionIsMutable();
31081               regionAction_.addAll(other.regionAction_);
31082             }
31083             onChanged();
31084           }
31085         } else {
31086           if (!other.regionAction_.isEmpty()) {
31087             if (regionActionBuilder_.isEmpty()) {
31088               regionActionBuilder_.dispose();
31089               regionActionBuilder_ = null;
31090               regionAction_ = other.regionAction_;
31091               bitField0_ = (bitField0_ & ~0x00000001);
31092               regionActionBuilder_ = 
31093                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
31094                    getRegionActionFieldBuilder() : null;
31095             } else {
31096               regionActionBuilder_.addAllMessages(other.regionAction_);
31097             }
31098           }
31099         }
31100         if (other.hasNonceGroup()) {
31101           setNonceGroup(other.getNonceGroup());
31102         }
31103         if (other.hasCondition()) {
31104           mergeCondition(other.getCondition());
31105         }
31106         this.mergeUnknownFields(other.getUnknownFields());
31107         return this;
31108       }
31109 
31110       public final boolean isInitialized() {
31111         for (int i = 0; i < getRegionActionCount(); i++) {
31112           if (!getRegionAction(i).isInitialized()) {
31113             
31114             return false;
31115           }
31116         }
31117         if (hasCondition()) {
31118           if (!getCondition().isInitialized()) {
31119             
31120             return false;
31121           }
31122         }
31123         return true;
31124       }
31125 
31126       public Builder mergeFrom(
31127           com.google.protobuf.CodedInputStream input,
31128           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31129           throws java.io.IOException {
31130         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null;
31131         try {
31132           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
31133         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
31134           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage();
31135           throw e;
31136         } finally {
31137           if (parsedMessage != null) {
31138             mergeFrom(parsedMessage);
31139           }
31140         }
31141         return this;
31142       }
31143       private int bitField0_;
31144 
31145       // repeated .hbase.pb.RegionAction regionAction = 1;
31146       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_ =
31147         java.util.Collections.emptyList();
31148       private void ensureRegionActionIsMutable() {
31149         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
31150           regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>(regionAction_);
31151           bitField0_ |= 0x00000001;
31152          }
31153       }
31154 
31155       private com.google.protobuf.RepeatedFieldBuilder<
31156           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> regionActionBuilder_;
31157 
31158       /**
31159        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31160        */
31161       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
31162         if (regionActionBuilder_ == null) {
31163           return java.util.Collections.unmodifiableList(regionAction_);
31164         } else {
31165           return regionActionBuilder_.getMessageList();
31166         }
31167       }
31168       /**
31169        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31170        */
31171       public int getRegionActionCount() {
31172         if (regionActionBuilder_ == null) {
31173           return regionAction_.size();
31174         } else {
31175           return regionActionBuilder_.getCount();
31176         }
31177       }
31178       /**
31179        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31180        */
31181       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
31182         if (regionActionBuilder_ == null) {
31183           return regionAction_.get(index);
31184         } else {
31185           return regionActionBuilder_.getMessage(index);
31186         }
31187       }
31188       /**
31189        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31190        */
31191       public Builder setRegionAction(
31192           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
31193         if (regionActionBuilder_ == null) {
31194           if (value == null) {
31195             throw new NullPointerException();
31196           }
31197           ensureRegionActionIsMutable();
31198           regionAction_.set(index, value);
31199           onChanged();
31200         } else {
31201           regionActionBuilder_.setMessage(index, value);
31202         }
31203         return this;
31204       }
31205       /**
31206        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31207        */
31208       public Builder setRegionAction(
31209           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
31210         if (regionActionBuilder_ == null) {
31211           ensureRegionActionIsMutable();
31212           regionAction_.set(index, builderForValue.build());
31213           onChanged();
31214         } else {
31215           regionActionBuilder_.setMessage(index, builderForValue.build());
31216         }
31217         return this;
31218       }
31219       /**
31220        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31221        */
31222       public Builder addRegionAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
31223         if (regionActionBuilder_ == null) {
31224           if (value == null) {
31225             throw new NullPointerException();
31226           }
31227           ensureRegionActionIsMutable();
31228           regionAction_.add(value);
31229           onChanged();
31230         } else {
31231           regionActionBuilder_.addMessage(value);
31232         }
31233         return this;
31234       }
31235       /**
31236        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31237        */
31238       public Builder addRegionAction(
31239           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
31240         if (regionActionBuilder_ == null) {
31241           if (value == null) {
31242             throw new NullPointerException();
31243           }
31244           ensureRegionActionIsMutable();
31245           regionAction_.add(index, value);
31246           onChanged();
31247         } else {
31248           regionActionBuilder_.addMessage(index, value);
31249         }
31250         return this;
31251       }
31252       /**
31253        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31254        */
31255       public Builder addRegionAction(
31256           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
31257         if (regionActionBuilder_ == null) {
31258           ensureRegionActionIsMutable();
31259           regionAction_.add(builderForValue.build());
31260           onChanged();
31261         } else {
31262           regionActionBuilder_.addMessage(builderForValue.build());
31263         }
31264         return this;
31265       }
31266       /**
31267        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31268        */
31269       public Builder addRegionAction(
31270           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
31271         if (regionActionBuilder_ == null) {
31272           ensureRegionActionIsMutable();
31273           regionAction_.add(index, builderForValue.build());
31274           onChanged();
31275         } else {
31276           regionActionBuilder_.addMessage(index, builderForValue.build());
31277         }
31278         return this;
31279       }
31280       /**
31281        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31282        */
31283       public Builder addAllRegionAction(
31284           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> values) {
31285         if (regionActionBuilder_ == null) {
31286           ensureRegionActionIsMutable();
31287           super.addAll(values, regionAction_);
31288           onChanged();
31289         } else {
31290           regionActionBuilder_.addAllMessages(values);
31291         }
31292         return this;
31293       }
31294       /**
31295        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31296        */
31297       public Builder clearRegionAction() {
31298         if (regionActionBuilder_ == null) {
31299           regionAction_ = java.util.Collections.emptyList();
31300           bitField0_ = (bitField0_ & ~0x00000001);
31301           onChanged();
31302         } else {
31303           regionActionBuilder_.clear();
31304         }
31305         return this;
31306       }
31307       /**
31308        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31309        */
31310       public Builder removeRegionAction(int index) {
31311         if (regionActionBuilder_ == null) {
31312           ensureRegionActionIsMutable();
31313           regionAction_.remove(index);
31314           onChanged();
31315         } else {
31316           regionActionBuilder_.remove(index);
31317         }
31318         return this;
31319       }
31320       /**
31321        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31322        */
31323       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder getRegionActionBuilder(
31324           int index) {
31325         return getRegionActionFieldBuilder().getBuilder(index);
31326       }
31327       /**
31328        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31329        */
31330       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
31331           int index) {
31332         if (regionActionBuilder_ == null) {
31333           return regionAction_.get(index);  } else {
31334           return regionActionBuilder_.getMessageOrBuilder(index);
31335         }
31336       }
31337       /**
31338        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31339        */
31340       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
31341            getRegionActionOrBuilderList() {
31342         if (regionActionBuilder_ != null) {
31343           return regionActionBuilder_.getMessageOrBuilderList();
31344         } else {
31345           return java.util.Collections.unmodifiableList(regionAction_);
31346         }
31347       }
31348       /**
31349        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31350        */
31351       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder() {
31352         return getRegionActionFieldBuilder().addBuilder(
31353             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
31354       }
31355       /**
31356        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31357        */
31358       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder(
31359           int index) {
31360         return getRegionActionFieldBuilder().addBuilder(
31361             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
31362       }
31363       /**
31364        * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
31365        */
31366       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder> 
31367            getRegionActionBuilderList() {
31368         return getRegionActionFieldBuilder().getBuilderList();
31369       }
31370       private com.google.protobuf.RepeatedFieldBuilder<
31371           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
31372           getRegionActionFieldBuilder() {
31373         if (regionActionBuilder_ == null) {
31374           regionActionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
31375               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>(
31376                   regionAction_,
31377                   ((bitField0_ & 0x00000001) == 0x00000001),
31378                   getParentForChildren(),
31379                   isClean());
31380           regionAction_ = null;
31381         }
31382         return regionActionBuilder_;
31383       }
31384 
31385       // optional uint64 nonceGroup = 2;
31386       private long nonceGroup_ ;
31387       /**
31388        * <code>optional uint64 nonceGroup = 2;</code>
31389        */
31390       public boolean hasNonceGroup() {
31391         return ((bitField0_ & 0x00000002) == 0x00000002);
31392       }
31393       /**
31394        * <code>optional uint64 nonceGroup = 2;</code>
31395        */
31396       public long getNonceGroup() {
31397         return nonceGroup_;
31398       }
31399       /**
31400        * <code>optional uint64 nonceGroup = 2;</code>
31401        */
31402       public Builder setNonceGroup(long value) {
31403         bitField0_ |= 0x00000002;
31404         nonceGroup_ = value;
31405         onChanged();
31406         return this;
31407       }
31408       /**
31409        * <code>optional uint64 nonceGroup = 2;</code>
31410        */
31411       public Builder clearNonceGroup() {
31412         bitField0_ = (bitField0_ & ~0x00000002);
31413         nonceGroup_ = 0L;
31414         onChanged();
31415         return this;
31416       }
31417 
31418       // optional .hbase.pb.Condition condition = 3;
31419       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
31420       private com.google.protobuf.SingleFieldBuilder<
31421           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
31422       /**
31423        * <code>optional .hbase.pb.Condition condition = 3;</code>
31424        */
31425       public boolean hasCondition() {
31426         return ((bitField0_ & 0x00000004) == 0x00000004);
31427       }
31428       /**
31429        * <code>optional .hbase.pb.Condition condition = 3;</code>
31430        */
31431       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
31432         if (conditionBuilder_ == null) {
31433           return condition_;
31434         } else {
31435           return conditionBuilder_.getMessage();
31436         }
31437       }
31438       /**
31439        * <code>optional .hbase.pb.Condition condition = 3;</code>
31440        */
31441       public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
31442         if (conditionBuilder_ == null) {
31443           if (value == null) {
31444             throw new NullPointerException();
31445           }
31446           condition_ = value;
31447           onChanged();
31448         } else {
31449           conditionBuilder_.setMessage(value);
31450         }
31451         bitField0_ |= 0x00000004;
31452         return this;
31453       }
31454       /**
31455        * <code>optional .hbase.pb.Condition condition = 3;</code>
31456        */
31457       public Builder setCondition(
31458           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
31459         if (conditionBuilder_ == null) {
31460           condition_ = builderForValue.build();
31461           onChanged();
31462         } else {
31463           conditionBuilder_.setMessage(builderForValue.build());
31464         }
31465         bitField0_ |= 0x00000004;
31466         return this;
31467       }
31468       /**
31469        * <code>optional .hbase.pb.Condition condition = 3;</code>
31470        */
31471       public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
31472         if (conditionBuilder_ == null) {
31473           if (((bitField0_ & 0x00000004) == 0x00000004) &&
31474               condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
31475             condition_ =
31476               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
31477           } else {
31478             condition_ = value;
31479           }
31480           onChanged();
31481         } else {
31482           conditionBuilder_.mergeFrom(value);
31483         }
31484         bitField0_ |= 0x00000004;
31485         return this;
31486       }
31487       /**
31488        * <code>optional .hbase.pb.Condition condition = 3;</code>
31489        */
31490       public Builder clearCondition() {
31491         if (conditionBuilder_ == null) {
31492           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
31493           onChanged();
31494         } else {
31495           conditionBuilder_.clear();
31496         }
31497         bitField0_ = (bitField0_ & ~0x00000004);
31498         return this;
31499       }
31500       /**
31501        * <code>optional .hbase.pb.Condition condition = 3;</code>
31502        */
31503       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
31504         bitField0_ |= 0x00000004;
31505         onChanged();
31506         return getConditionFieldBuilder().getBuilder();
31507       }
31508       /**
31509        * <code>optional .hbase.pb.Condition condition = 3;</code>
31510        */
31511       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
31512         if (conditionBuilder_ != null) {
31513           return conditionBuilder_.getMessageOrBuilder();
31514         } else {
31515           return condition_;
31516         }
31517       }
31518       /**
31519        * <code>optional .hbase.pb.Condition condition = 3;</code>
31520        */
31521       private com.google.protobuf.SingleFieldBuilder<
31522           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> 
31523           getConditionFieldBuilder() {
31524         if (conditionBuilder_ == null) {
31525           conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
31526               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
31527                   condition_,
31528                   getParentForChildren(),
31529                   isClean());
31530           condition_ = null;
31531         }
31532         return conditionBuilder_;
31533       }
31534 
31535       // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRequest)
31536     }
31537 
31538     static {
31539       defaultInstance = new MultiRequest(true);
31540       defaultInstance.initFields();
31541     }
31542 
31543     // @@protoc_insertion_point(class_scope:hbase.pb.MultiRequest)
31544   }
31545 
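      // A minimal usage sketch (assumes the newBuilder() and addRegionAction(..)
      // methods generated earlier in this file; regionAction, nonceGroup and
      // condition are placeholders):
      //
      //   ClientProtos.MultiRequest request = ClientProtos.MultiRequest.newBuilder()
      //       .addRegionAction(regionAction)   // repeated .hbase.pb.RegionAction regionAction = 1
      //       .setNonceGroup(nonceGroup)       // optional uint64 nonceGroup = 2
      //       .setCondition(condition)         // optional .hbase.pb.Condition condition = 3
      //       .build();
      //
      // As in the builder code above, setCondition(..) rejects a null value with a
      // NullPointerException and clearCondition() restores Condition.getDefaultInstance().
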
31546   public interface MultiResponseOrBuilder
31547       extends com.google.protobuf.MessageOrBuilder {
31548 
31549     // repeated .hbase.pb.RegionActionResult regionActionResult = 1;
31550     /**
31551      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31552      */
31553     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> 
31554         getRegionActionResultList();
31555     /**
31556      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31557      */
31558     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index);
31559     /**
31560      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31561      */
31562     int getRegionActionResultCount();
31563     /**
31564      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31565      */
31566     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
31567         getRegionActionResultOrBuilderList();
31568     /**
31569      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31570      */
31571     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
31572         int index);
31573 
31574     // optional bool processed = 2;
31575     /**
31576      * <code>optional bool processed = 2;</code>
31577      *
31578      * <pre>
31579      * used only for mutate requests, to indicate whether the mutation was processed
31580      * </pre>
31581      */
31582     boolean hasProcessed();
31583     /**
31584      * <code>optional bool processed = 2;</code>
31585      *
31586      * <pre>
31587      * used only for mutate requests, to indicate whether the mutation was processed
31588      * </pre>
31589      */
31590     boolean getProcessed();
31591   }
31592   /**
31593    * Protobuf type {@code hbase.pb.MultiResponse}
31594    */
31595   public static final class MultiResponse extends
31596       com.google.protobuf.GeneratedMessage
31597       implements MultiResponseOrBuilder {
31598     // Use MultiResponse.newBuilder() to construct.
31599     private MultiResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
31600       super(builder);
31601       this.unknownFields = builder.getUnknownFields();
31602     }
31603     private MultiResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
31604 
31605     private static final MultiResponse defaultInstance;
31606     public static MultiResponse getDefaultInstance() {
31607       return defaultInstance;
31608     }
31609 
31610     public MultiResponse getDefaultInstanceForType() {
31611       return defaultInstance;
31612     }
31613 
31614     private final com.google.protobuf.UnknownFieldSet unknownFields;
31615     @java.lang.Override
31616     public final com.google.protobuf.UnknownFieldSet
31617         getUnknownFields() {
31618       return this.unknownFields;
31619     }
31620     private MultiResponse(
31621         com.google.protobuf.CodedInputStream input,
31622         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31623         throws com.google.protobuf.InvalidProtocolBufferException {
31624       initFields();
31625       int mutable_bitField0_ = 0;
31626       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
31627           com.google.protobuf.UnknownFieldSet.newBuilder();
31628       try {
31629         boolean done = false;
31630         while (!done) {
31631           int tag = input.readTag();
31632           switch (tag) {
31633             case 0:
31634               done = true;
31635               break;
31636             default: {
31637               if (!parseUnknownField(input, unknownFields,
31638                                      extensionRegistry, tag)) {
31639                 done = true;
31640               }
31641               break;
31642             }
31643             case 10: {
31644               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
31645                 regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>();
31646                 mutable_bitField0_ |= 0x00000001;
31647               }
31648               regionActionResult_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry));
31649               break;
31650             }
31651             case 16: {
31652               bitField0_ |= 0x00000001;
31653               processed_ = input.readBool();
31654               break;
31655             }
31656           }
31657         }
31658       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
31659         throw e.setUnfinishedMessage(this);
31660       } catch (java.io.IOException e) {
31661         throw new com.google.protobuf.InvalidProtocolBufferException(
31662             e.getMessage()).setUnfinishedMessage(this);
31663       } finally {
31664         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
31665           regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
31666         }
31667         this.unknownFields = unknownFields.build();
31668         makeExtensionsImmutable();
31669       }
31670     }
31671     public static final com.google.protobuf.Descriptors.Descriptor
31672         getDescriptor() {
31673       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor;
31674     }
31675 
31676     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
31677         internalGetFieldAccessorTable() {
31678       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable
31679           .ensureFieldAccessorsInitialized(
31680               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
31681     }
31682 
31683     public static com.google.protobuf.Parser<MultiResponse> PARSER =
31684         new com.google.protobuf.AbstractParser<MultiResponse>() {
31685       public MultiResponse parsePartialFrom(
31686           com.google.protobuf.CodedInputStream input,
31687           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31688           throws com.google.protobuf.InvalidProtocolBufferException {
31689         return new MultiResponse(input, extensionRegistry);
31690       }
31691     };
31692 
31693     @java.lang.Override
31694     public com.google.protobuf.Parser<MultiResponse> getParserForType() {
31695       return PARSER;
31696     }
31697 
31698     private int bitField0_;
31699     // repeated .hbase.pb.RegionActionResult regionActionResult = 1;
31700     public static final int REGIONACTIONRESULT_FIELD_NUMBER = 1;
31701     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_;
31702     /**
31703      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31704      */
31705     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
31706       return regionActionResult_;
31707     }
31708     /**
31709      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31710      */
31711     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
31712         getRegionActionResultOrBuilderList() {
31713       return regionActionResult_;
31714     }
31715     /**
31716      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31717      */
31718     public int getRegionActionResultCount() {
31719       return regionActionResult_.size();
31720     }
31721     /**
31722      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31723      */
31724     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
31725       return regionActionResult_.get(index);
31726     }
31727     /**
31728      * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
31729      */
31730     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
31731         int index) {
31732       return regionActionResult_.get(index);
31733     }
31734 
31735     // optional bool processed = 2;
31736     public static final int PROCESSED_FIELD_NUMBER = 2;
31737     private boolean processed_;
31738     /**
31739      * <code>optional bool processed = 2;</code>
31740      *
31741      * <pre>
31742      * used only for mutate requests, to indicate whether the mutation was processed
31743      * </pre>
31744      */
31745     public boolean hasProcessed() {
31746       return ((bitField0_ & 0x00000001) == 0x00000001);
31747     }
31748     /**
31749      * <code>optional bool processed = 2;</code>
31750      *
31751      * <pre>
31752      * used only for mutate requests, to indicate whether the mutation was processed
31753      * </pre>
31754      */
31755     public boolean getProcessed() {
31756       return processed_;
31757     }
31758 
31759     private void initFields() {
31760       regionActionResult_ = java.util.Collections.emptyList();
31761       processed_ = false;
31762     }
31763     private byte memoizedIsInitialized = -1;
31764     public final boolean isInitialized() {
31765       byte isInitialized = memoizedIsInitialized;
31766       if (isInitialized != -1) return isInitialized == 1;
31767 
31768       for (int i = 0; i < getRegionActionResultCount(); i++) {
31769         if (!getRegionActionResult(i).isInitialized()) {
31770           memoizedIsInitialized = 0;
31771           return false;
31772         }
31773       }
31774       memoizedIsInitialized = 1;
31775       return true;
31776     }
31777 
31778     public void writeTo(com.google.protobuf.CodedOutputStream output)
31779                         throws java.io.IOException {
31780       getSerializedSize();
31781       for (int i = 0; i < regionActionResult_.size(); i++) {
31782         output.writeMessage(1, regionActionResult_.get(i));
31783       }
31784       if (((bitField0_ & 0x00000001) == 0x00000001)) {
31785         output.writeBool(2, processed_);
31786       }
31787       getUnknownFields().writeTo(output);
31788     }
31789 
31790     private int memoizedSerializedSize = -1;
31791     public int getSerializedSize() {
31792       int size = memoizedSerializedSize;
31793       if (size != -1) return size;
31794 
31795       size = 0;
31796       for (int i = 0; i < regionActionResult_.size(); i++) {
31797         size += com.google.protobuf.CodedOutputStream
31798           .computeMessageSize(1, regionActionResult_.get(i));
31799       }
31800       if (((bitField0_ & 0x00000001) == 0x00000001)) {
31801         size += com.google.protobuf.CodedOutputStream
31802           .computeBoolSize(2, processed_);
31803       }
31804       size += getUnknownFields().getSerializedSize();
31805       memoizedSerializedSize = size;
31806       return size;
31807     }
31808 
31809     private static final long serialVersionUID = 0L;
31810     @java.lang.Override
31811     protected java.lang.Object writeReplace()
31812         throws java.io.ObjectStreamException {
31813       return super.writeReplace();
31814     }
31815 
31816     @java.lang.Override
31817     public boolean equals(final java.lang.Object obj) {
31818       if (obj == this) {
31819        return true;
31820       }
31821       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) {
31822         return super.equals(obj);
31823       }
31824       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj;
31825 
31826       boolean result = true;
31827       result = result && getRegionActionResultList()
31828           .equals(other.getRegionActionResultList());
31829       result = result && (hasProcessed() == other.hasProcessed());
31830       if (hasProcessed()) {
31831         result = result && (getProcessed()
31832             == other.getProcessed());
31833       }
31834       result = result &&
31835           getUnknownFields().equals(other.getUnknownFields());
31836       return result;
31837     }
31838 
31839     private int memoizedHashCode = 0;
31840     @java.lang.Override
31841     public int hashCode() {
31842       if (memoizedHashCode != 0) {
31843         return memoizedHashCode;
31844       }
31845       int hash = 41;
31846       hash = (19 * hash) + getDescriptorForType().hashCode();
31847       if (getRegionActionResultCount() > 0) {
31848         hash = (37 * hash) + REGIONACTIONRESULT_FIELD_NUMBER;
31849         hash = (53 * hash) + getRegionActionResultList().hashCode();
31850       }
31851       if (hasProcessed()) {
31852         hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
31853         hash = (53 * hash) + hashBoolean(getProcessed());
31854       }
31855       hash = (29 * hash) + getUnknownFields().hashCode();
31856       memoizedHashCode = hash;
31857       return hash;
31858     }
31859 
31860     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
31861         com.google.protobuf.ByteString data)
31862         throws com.google.protobuf.InvalidProtocolBufferException {
31863       return PARSER.parseFrom(data);
31864     }
31865     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
31866         com.google.protobuf.ByteString data,
31867         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31868         throws com.google.protobuf.InvalidProtocolBufferException {
31869       return PARSER.parseFrom(data, extensionRegistry);
31870     }
31871     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data)
31872         throws com.google.protobuf.InvalidProtocolBufferException {
31873       return PARSER.parseFrom(data);
31874     }
31875     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
31876         byte[] data,
31877         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31878         throws com.google.protobuf.InvalidProtocolBufferException {
31879       return PARSER.parseFrom(data, extensionRegistry);
31880     }
31881     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input)
31882         throws java.io.IOException {
31883       return PARSER.parseFrom(input);
31884     }
31885     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
31886         java.io.InputStream input,
31887         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31888         throws java.io.IOException {
31889       return PARSER.parseFrom(input, extensionRegistry);
31890     }
31891     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input)
31892         throws java.io.IOException {
31893       return PARSER.parseDelimitedFrom(input);
31894     }
31895     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(
31896         java.io.InputStream input,
31897         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31898         throws java.io.IOException {
31899       return PARSER.parseDelimitedFrom(input, extensionRegistry);
31900     }
31901     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
31902         com.google.protobuf.CodedInputStream input)
31903         throws java.io.IOException {
31904       return PARSER.parseFrom(input);
31905     }
31906     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
31907         com.google.protobuf.CodedInputStream input,
31908         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31909         throws java.io.IOException {
31910       return PARSER.parseFrom(input, extensionRegistry);
31911     }
31912 
31913     public static Builder newBuilder() { return Builder.create(); }
31914     public Builder newBuilderForType() { return newBuilder(); }
31915     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) {
31916       return newBuilder().mergeFrom(prototype);
31917     }
31918     public Builder toBuilder() { return newBuilder(this); }
31919 
31920     @java.lang.Override
31921     protected Builder newBuilderForType(
31922         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
31923       Builder builder = new Builder(parent);
31924       return builder;
31925     }
31926     /**
31927      * Protobuf type {@code hbase.pb.MultiResponse}
31928      */
31929     public static final class Builder extends
31930         com.google.protobuf.GeneratedMessage.Builder<Builder>
31931        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder {
31932       public static final com.google.protobuf.Descriptors.Descriptor
31933           getDescriptor() {
31934         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor;
31935       }
31936 
31937       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
31938           internalGetFieldAccessorTable() {
31939         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable
31940             .ensureFieldAccessorsInitialized(
31941                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
31942       }
31943 
31944       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder()
31945       private Builder() {
31946         maybeForceBuilderInitialization();
31947       }
31948 
31949       private Builder(
31950           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
31951         super(parent);
31952         maybeForceBuilderInitialization();
31953       }
31954       private void maybeForceBuilderInitialization() {
31955         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
31956           getRegionActionResultFieldBuilder();
31957         }
31958       }
31959       private static Builder create() {
31960         return new Builder();
31961       }
31962 
31963       public Builder clear() {
31964         super.clear();
31965         if (regionActionResultBuilder_ == null) {
31966           regionActionResult_ = java.util.Collections.emptyList();
31967           bitField0_ = (bitField0_ & ~0x00000001);
31968         } else {
31969           regionActionResultBuilder_.clear();
31970         }
31971         processed_ = false;
31972         bitField0_ = (bitField0_ & ~0x00000002);
31973         return this;
31974       }
31975 
31976       public Builder clone() {
31977         return create().mergeFrom(buildPartial());
31978       }
31979 
31980       public com.google.protobuf.Descriptors.Descriptor
31981           getDescriptorForType() {
31982         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor;
31983       }
31984 
31985       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() {
31986         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
31987       }
31988 
31989       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() {
31990         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial();
31991         if (!result.isInitialized()) {
31992           throw newUninitializedMessageException(result);
31993         }
31994         return result;
31995       }
31996 
31997       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() {
31998         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this);
31999         int from_bitField0_ = bitField0_;
32000         int to_bitField0_ = 0;
32001         if (regionActionResultBuilder_ == null) {
32002           if (((bitField0_ & 0x00000001) == 0x00000001)) {
32003             regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
32004             bitField0_ = (bitField0_ & ~0x00000001);
32005           }
32006           result.regionActionResult_ = regionActionResult_;
32007         } else {
32008           result.regionActionResult_ = regionActionResultBuilder_.build();
32009         }
32010         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
32011           to_bitField0_ |= 0x00000001;
32012         }
32013         result.processed_ = processed_;
32014         result.bitField0_ = to_bitField0_;
32015         onBuilt();
32016         return result;
32017       }
32018 
32019       public Builder mergeFrom(com.google.protobuf.Message other) {
32020         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) {
32021           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other);
32022         } else {
32023           super.mergeFrom(other);
32024           return this;
32025         }
32026       }
32027 
32028       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) {
32029         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this;
32030         if (regionActionResultBuilder_ == null) {
32031           if (!other.regionActionResult_.isEmpty()) {
32032             if (regionActionResult_.isEmpty()) {
32033               regionActionResult_ = other.regionActionResult_;
32034               bitField0_ = (bitField0_ & ~0x00000001);
32035             } else {
32036               ensureRegionActionResultIsMutable();
32037               regionActionResult_.addAll(other.regionActionResult_);
32038             }
32039             onChanged();
32040           }
32041         } else {
32042           if (!other.regionActionResult_.isEmpty()) {
32043             if (regionActionResultBuilder_.isEmpty()) {
32044               regionActionResultBuilder_.dispose();
32045               regionActionResultBuilder_ = null;
32046               regionActionResult_ = other.regionActionResult_;
32047               bitField0_ = (bitField0_ & ~0x00000001);
32048               regionActionResultBuilder_ = 
32049                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
32050                    getRegionActionResultFieldBuilder() : null;
32051             } else {
32052               regionActionResultBuilder_.addAllMessages(other.regionActionResult_);
32053             }
32054           }
32055         }
32056         if (other.hasProcessed()) {
32057           setProcessed(other.getProcessed());
32058         }
32059         this.mergeUnknownFields(other.getUnknownFields());
32060         return this;
32061       }
32062 
32063       public final boolean isInitialized() {
32064         for (int i = 0; i < getRegionActionResultCount(); i++) {
32065           if (!getRegionActionResult(i).isInitialized()) {
32066             
32067             return false;
32068           }
32069         }
32070         return true;
32071       }
32072 
32073       public Builder mergeFrom(
32074           com.google.protobuf.CodedInputStream input,
32075           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32076           throws java.io.IOException {
32077         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parsedMessage = null;
32078         try {
32079           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
32080         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
32081           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage();
32082           throw e;
32083         } finally {
32084           if (parsedMessage != null) {
32085             mergeFrom(parsedMessage);
32086           }
32087         }
32088         return this;
32089       }
32090       private int bitField0_;
32091 
32092       // repeated .hbase.pb.RegionActionResult regionActionResult = 1;
32093       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_ =
32094         java.util.Collections.emptyList();
32095       private void ensureRegionActionResultIsMutable() {
32096         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
32097           regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>(regionActionResult_);
32098           bitField0_ |= 0x00000001;
32099          }
32100       }
32101 
32102       private com.google.protobuf.RepeatedFieldBuilder<
32103           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> regionActionResultBuilder_;
32104 
32105       /**
32106        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32107        */
32108       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
32109         if (regionActionResultBuilder_ == null) {
32110           return java.util.Collections.unmodifiableList(regionActionResult_);
32111         } else {
32112           return regionActionResultBuilder_.getMessageList();
32113         }
32114       }
32115       /**
32116        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32117        */
32118       public int getRegionActionResultCount() {
32119         if (regionActionResultBuilder_ == null) {
32120           return regionActionResult_.size();
32121         } else {
32122           return regionActionResultBuilder_.getCount();
32123         }
32124       }
32125       /**
32126        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32127        */
32128       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
32129         if (regionActionResultBuilder_ == null) {
32130           return regionActionResult_.get(index);
32131         } else {
32132           return regionActionResultBuilder_.getMessage(index);
32133         }
32134       }
32135       /**
32136        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32137        */
32138       public Builder setRegionActionResult(
32139           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
32140         if (regionActionResultBuilder_ == null) {
32141           if (value == null) {
32142             throw new NullPointerException();
32143           }
32144           ensureRegionActionResultIsMutable();
32145           regionActionResult_.set(index, value);
32146           onChanged();
32147         } else {
32148           regionActionResultBuilder_.setMessage(index, value);
32149         }
32150         return this;
32151       }
32152       /**
32153        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32154        */
32155       public Builder setRegionActionResult(
32156           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
32157         if (regionActionResultBuilder_ == null) {
32158           ensureRegionActionResultIsMutable();
32159           regionActionResult_.set(index, builderForValue.build());
32160           onChanged();
32161         } else {
32162           regionActionResultBuilder_.setMessage(index, builderForValue.build());
32163         }
32164         return this;
32165       }
32166       /**
32167        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32168        */
32169       public Builder addRegionActionResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
32170         if (regionActionResultBuilder_ == null) {
32171           if (value == null) {
32172             throw new NullPointerException();
32173           }
32174           ensureRegionActionResultIsMutable();
32175           regionActionResult_.add(value);
32176           onChanged();
32177         } else {
32178           regionActionResultBuilder_.addMessage(value);
32179         }
32180         return this;
32181       }
32182       /**
32183        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32184        */
32185       public Builder addRegionActionResult(
32186           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
32187         if (regionActionResultBuilder_ == null) {
32188           if (value == null) {
32189             throw new NullPointerException();
32190           }
32191           ensureRegionActionResultIsMutable();
32192           regionActionResult_.add(index, value);
32193           onChanged();
32194         } else {
32195           regionActionResultBuilder_.addMessage(index, value);
32196         }
32197         return this;
32198       }
32199       /**
32200        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32201        */
32202       public Builder addRegionActionResult(
32203           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
32204         if (regionActionResultBuilder_ == null) {
32205           ensureRegionActionResultIsMutable();
32206           regionActionResult_.add(builderForValue.build());
32207           onChanged();
32208         } else {
32209           regionActionResultBuilder_.addMessage(builderForValue.build());
32210         }
32211         return this;
32212       }
32213       /**
32214        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32215        */
32216       public Builder addRegionActionResult(
32217           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
32218         if (regionActionResultBuilder_ == null) {
32219           ensureRegionActionResultIsMutable();
32220           regionActionResult_.add(index, builderForValue.build());
32221           onChanged();
32222         } else {
32223           regionActionResultBuilder_.addMessage(index, builderForValue.build());
32224         }
32225         return this;
32226       }
32227       /**
32228        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32229        */
32230       public Builder addAllRegionActionResult(
32231           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> values) {
32232         if (regionActionResultBuilder_ == null) {
32233           ensureRegionActionResultIsMutable();
32234           super.addAll(values, regionActionResult_);
32235           onChanged();
32236         } else {
32237           regionActionResultBuilder_.addAllMessages(values);
32238         }
32239         return this;
32240       }
32241       /**
32242        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32243        */
32244       public Builder clearRegionActionResult() {
32245         if (regionActionResultBuilder_ == null) {
32246           regionActionResult_ = java.util.Collections.emptyList();
32247           bitField0_ = (bitField0_ & ~0x00000001);
32248           onChanged();
32249         } else {
32250           regionActionResultBuilder_.clear();
32251         }
32252         return this;
32253       }
32254       /**
32255        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32256        */
32257       public Builder removeRegionActionResult(int index) {
32258         if (regionActionResultBuilder_ == null) {
32259           ensureRegionActionResultIsMutable();
32260           regionActionResult_.remove(index);
32261           onChanged();
32262         } else {
32263           regionActionResultBuilder_.remove(index);
32264         }
32265         return this;
32266       }
32267       /**
32268        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32269        */
32270       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getRegionActionResultBuilder(
32271           int index) {
32272         return getRegionActionResultFieldBuilder().getBuilder(index);
32273       }
32274       /**
32275        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32276        */
32277       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
32278           int index) {
32279         if (regionActionResultBuilder_ == null) {
32280           return regionActionResult_.get(index);  } else {
32281           return regionActionResultBuilder_.getMessageOrBuilder(index);
32282         }
32283       }
32284       /**
32285        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32286        */
32287       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
32288            getRegionActionResultOrBuilderList() {
32289         if (regionActionResultBuilder_ != null) {
32290           return regionActionResultBuilder_.getMessageOrBuilderList();
32291         } else {
32292           return java.util.Collections.unmodifiableList(regionActionResult_);
32293         }
32294       }
32295       /**
32296        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32297        */
32298       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder() {
32299         return getRegionActionResultFieldBuilder().addBuilder(
32300             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
32301       }
32302       /**
32303        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32304        */
32305       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder(
32306           int index) {
32307         return getRegionActionResultFieldBuilder().addBuilder(
32308             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
32309       }
32310       /**
32311        * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
32312        */
32313       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder> 
32314            getRegionActionResultBuilderList() {
32315         return getRegionActionResultFieldBuilder().getBuilderList();
32316       }
32317       private com.google.protobuf.RepeatedFieldBuilder<
32318           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
32319           getRegionActionResultFieldBuilder() {
32320         if (regionActionResultBuilder_ == null) {
32321           regionActionResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
32322               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>(
32323                   regionActionResult_,
32324                   ((bitField0_ & 0x00000001) == 0x00000001),
32325                   getParentForChildren(),
32326                   isClean());
32327           regionActionResult_ = null;
32328         }
32329         return regionActionResultBuilder_;
32330       }
32331 
32332       // optional bool processed = 2;
32333       private boolean processed_ ;
32334       /**
32335        * <code>optional bool processed = 2;</code>
32336        *
32337        * <pre>
32338        * used only for mutate requests, to indicate whether the mutation was processed
32339        * </pre>
32340        */
32341       public boolean hasProcessed() {
32342         return ((bitField0_ & 0x00000002) == 0x00000002);
32343       }
32344       /**
32345        * <code>optional bool processed = 2;</code>
32346        *
32347        * <pre>
32348        * used only for mutate requests, to indicate whether the mutation was processed
32349        * </pre>
32350        */
32351       public boolean getProcessed() {
32352         return processed_;
32353       }
32354       /**
32355        * <code>optional bool processed = 2;</code>
32356        *
32357        * <pre>
32358        * used only for mutate requests, to indicate whether the mutation was processed
32359        * </pre>
32360        */
32361       public Builder setProcessed(boolean value) {
32362         bitField0_ |= 0x00000002;
32363         processed_ = value;
32364         onChanged();
32365         return this;
32366       }
32367       /**
32368        * <code>optional bool processed = 2;</code>
32369        *
32370        * <pre>
32371        * used only for mutate requests, to indicate whether the mutation was processed
32372        * </pre>
32373        */
32374       public Builder clearProcessed() {
32375         bitField0_ = (bitField0_ & ~0x00000002);
32376         processed_ = false;
32377         onChanged();
32378         return this;
32379       }
32380 
32381       // @@protoc_insertion_point(builder_scope:hbase.pb.MultiResponse)
32382     }
32383 
32384     static {
32385       defaultInstance = new MultiResponse(true);
32386       defaultInstance.initFields();
32387     }
32388 
32389     // @@protoc_insertion_point(class_scope:hbase.pb.MultiResponse)
32390   }
32391 
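      // A minimal usage sketch for reading a MultiResponse with the accessors
      // defined above (`bytes` is assumed to hold a wire-format MultiResponse):
      //
      //   ClientProtos.MultiResponse response = ClientProtos.MultiResponse.parseFrom(bytes);
      //   for (int i = 0; i < response.getRegionActionResultCount(); i++) {
      //     ClientProtos.RegionActionResult result = response.getRegionActionResult(i);
      //     // inspect each per-RegionAction result here
      //   }
      //   if (response.hasProcessed()) {
      //     boolean processed = response.getProcessed();  // only meaningful for mutate requests
      //   }
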
32392   /**
32393    * Protobuf service {@code hbase.pb.ClientService}
32394    */
32395   public static abstract class ClientService
32396       implements com.google.protobuf.Service {
32397     protected ClientService() {}
32398 
32399     public interface Interface {
32400       /**
32401        * <code>rpc Get(.hbase.pb.GetRequest) returns (.hbase.pb.GetResponse);</code>
32402        */
32403       public abstract void get(
32404           com.google.protobuf.RpcController controller,
32405           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
32406           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
32407 
32408       /**
32409        * <code>rpc Mutate(.hbase.pb.MutateRequest) returns (.hbase.pb.MutateResponse);</code>
32410        */
32411       public abstract void mutate(
32412           com.google.protobuf.RpcController controller,
32413           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
32414           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
32415 
32416       /**
32417        * <code>rpc Scan(.hbase.pb.ScanRequest) returns (.hbase.pb.ScanResponse);</code>
32418        */
32419       public abstract void scan(
32420           com.google.protobuf.RpcController controller,
32421           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
32422           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
32423 
32424       /**
32425        * <code>rpc BulkLoadHFile(.hbase.pb.BulkLoadHFileRequest) returns (.hbase.pb.BulkLoadHFileResponse);</code>
32426        */
32427       public abstract void bulkLoadHFile(
32428           com.google.protobuf.RpcController controller,
32429           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
32430           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
32431 
32432       /**
32433        * <code>rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
32434        */
32435       public abstract void execService(
32436           com.google.protobuf.RpcController controller,
32437           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32438           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
32439 
32440       /**
32441        * <code>rpc ExecRegionServerService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
32442        */
32443       public abstract void execRegionServerService(
32444           com.google.protobuf.RpcController controller,
32445           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32446           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
32447 
32448       /**
32449        * <code>rpc Multi(.hbase.pb.MultiRequest) returns (.hbase.pb.MultiResponse);</code>
32450        */
32451       public abstract void multi(
32452           com.google.protobuf.RpcController controller,
32453           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
32454           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
32455 
32456     }
32457 
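        // A minimal client-side sketch of the asynchronous contract above
        // (`service` is assumed to be some Interface implementation, for example
        // the Stub generated further down in this file; `controller` and
        // `getRequest` are placeholders):
        //
        //   service.get(controller, getRequest,
        //       new com.google.protobuf.RpcCallback<ClientProtos.GetResponse>() {
        //         public void run(ClientProtos.GetResponse response) {
        //           // handle the GetResponse here
        //         }
        //       });
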
32458     public static com.google.protobuf.Service newReflectiveService(
32459         final Interface impl) {
32460       return new ClientService() {
32461         @java.lang.Override
32462         public  void get(
32463             com.google.protobuf.RpcController controller,
32464             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
32465             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
32466           impl.get(controller, request, done);
32467         }
32468 
32469         @java.lang.Override
32470         public  void mutate(
32471             com.google.protobuf.RpcController controller,
32472             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
32473             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
32474           impl.mutate(controller, request, done);
32475         }
32476 
32477         @java.lang.Override
32478         public  void scan(
32479             com.google.protobuf.RpcController controller,
32480             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
32481             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
32482           impl.scan(controller, request, done);
32483         }
32484 
32485         @java.lang.Override
32486         public  void bulkLoadHFile(
32487             com.google.protobuf.RpcController controller,
32488             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
32489             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
32490           impl.bulkLoadHFile(controller, request, done);
32491         }
32492 
32493         @java.lang.Override
32494         public  void execService(
32495             com.google.protobuf.RpcController controller,
32496             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32497             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
32498           impl.execService(controller, request, done);
32499         }
32500 
32501         @java.lang.Override
32502         public  void execRegionServerService(
32503             com.google.protobuf.RpcController controller,
32504             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32505             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
32506           impl.execRegionServerService(controller, request, done);
32507         }
32508 
32509         @java.lang.Override
32510         public  void multi(
32511             com.google.protobuf.RpcController controller,
32512             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
32513             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
32514           impl.multi(controller, request, done);
32515         }
32516 
32517       };
32518     }
32519 
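        // A minimal sketch of wrapping an application-provided Interface
        // implementation (`impl`, assumed) as a generic protobuf Service; each
        // rpc on the returned ClientService simply delegates to the matching
        // method on impl, as wired above:
        //
        //   com.google.protobuf.Service service =
        //       ClientProtos.ClientService.newReflectiveService(impl);
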
32520     public static com.google.protobuf.BlockingService
32521         newReflectiveBlockingService(final BlockingInterface impl) {
32522       return new com.google.protobuf.BlockingService() {
32523         public final com.google.protobuf.Descriptors.ServiceDescriptor
32524             getDescriptorForType() {
32525           return getDescriptor();
32526         }
32527 
32528         public final com.google.protobuf.Message callBlockingMethod(
32529             com.google.protobuf.Descriptors.MethodDescriptor method,
32530             com.google.protobuf.RpcController controller,
32531             com.google.protobuf.Message request)
32532             throws com.google.protobuf.ServiceException {
32533           if (method.getService() != getDescriptor()) {
32534             throw new java.lang.IllegalArgumentException(
32535               "Service.callBlockingMethod() given method descriptor for " +
32536               "wrong service type.");
32537           }
32538           switch(method.getIndex()) {
32539             case 0:
32540               return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request);
32541             case 1:
32542               return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request);
32543             case 2:
32544               return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request);
32545             case 3:
32546               return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request);
32547             case 4:
32548               return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
32549             case 5:
32550               return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
32551             case 6:
32552               return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
32553             default:
32554               throw new java.lang.AssertionError("Can't get here.");
32555           }
32556         }
32557 
32558         public final com.google.protobuf.Message
32559             getRequestPrototype(
32560             com.google.protobuf.Descriptors.MethodDescriptor method) {
32561           if (method.getService() != getDescriptor()) {
32562             throw new java.lang.IllegalArgumentException(
32563               "Service.getRequestPrototype() given method " +
32564               "descriptor for wrong service type.");
32565           }
32566           switch(method.getIndex()) {
32567             case 0:
32568               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
32569             case 1:
32570               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
32571             case 2:
32572               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
32573             case 3:
32574               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
32575             case 4:
32576               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
32577             case 5:
32578               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
32579             case 6:
32580               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
32581             default:
32582               throw new java.lang.AssertionError("Can't get here.");
32583           }
32584         }
32585 
32586         public final com.google.protobuf.Message
32587             getResponsePrototype(
32588             com.google.protobuf.Descriptors.MethodDescriptor method) {
32589           if (method.getService() != getDescriptor()) {
32590             throw new java.lang.IllegalArgumentException(
32591               "Service.getResponsePrototype() given method " +
32592               "descriptor for wrong service type.");
32593           }
32594           switch(method.getIndex()) {
32595             case 0:
32596               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
32597             case 1:
32598               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
32599             case 2:
32600               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
32601             case 3:
32602               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
32603             case 4:
32604               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
32605             case 5:
32606               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
32607             case 6:
32608               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
32609             default:
32610               throw new java.lang.AssertionError("Can't get here.");
32611           }
32612         }
32613 
32614       };
32615     }
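    // Editorial note (not generated code): the anonymous class built above wires a
    // server-side BlockingInterface implementation into protobuf's reflective
    // BlockingService machinery, dispatching on method.getIndex(). A minimal usage
    // sketch, assuming a hypothetical MyClientService implements BlockingInterface
    // and that this wrapper is exposed as newReflectiveBlockingService(impl):
    //
    //   ClientProtos.ClientService.BlockingInterface impl = new MyClientService();
    //   com.google.protobuf.BlockingService service =
    //       ClientProtos.ClientService.newReflectiveBlockingService(impl);
    //   // service.callBlockingMethod(...) now routes each descriptor index
    //   // (0 = Get, 1 = Mutate, ..., 6 = Multi) to the matching impl method.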
32616 
32617     /**
32618      * <code>rpc Get(.hbase.pb.GetRequest) returns (.hbase.pb.GetResponse);</code>
32619      */
32620     public abstract void get(
32621         com.google.protobuf.RpcController controller,
32622         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
32623         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
32624 
32625     /**
32626      * <code>rpc Mutate(.hbase.pb.MutateRequest) returns (.hbase.pb.MutateResponse);</code>
32627      */
32628     public abstract void mutate(
32629         com.google.protobuf.RpcController controller,
32630         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
32631         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
32632 
32633     /**
32634      * <code>rpc Scan(.hbase.pb.ScanRequest) returns (.hbase.pb.ScanResponse);</code>
32635      */
32636     public abstract void scan(
32637         com.google.protobuf.RpcController controller,
32638         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
32639         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
32640 
32641     /**
32642      * <code>rpc BulkLoadHFile(.hbase.pb.BulkLoadHFileRequest) returns (.hbase.pb.BulkLoadHFileResponse);</code>
32643      */
32644     public abstract void bulkLoadHFile(
32645         com.google.protobuf.RpcController controller,
32646         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
32647         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
32648 
32649     /**
32650      * <code>rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
32651      */
32652     public abstract void execService(
32653         com.google.protobuf.RpcController controller,
32654         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32655         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
32656 
32657     /**
32658      * <code>rpc ExecRegionServerService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
32659      */
32660     public abstract void execRegionServerService(
32661         com.google.protobuf.RpcController controller,
32662         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32663         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
32664 
32665     /**
32666      * <code>rpc Multi(.hbase.pb.MultiRequest) returns (.hbase.pb.MultiResponse);</code>
32667      */
32668     public abstract void multi(
32669         com.google.protobuf.RpcController controller,
32670         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
32671         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
32672 
32673     public static final
32674         com.google.protobuf.Descriptors.ServiceDescriptor
32675         getDescriptor() {
32676       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0);
32677     }
32678     public final com.google.protobuf.Descriptors.ServiceDescriptor
32679         getDescriptorForType() {
32680       return getDescriptor();
32681     }
32682 
32683     public final void callMethod(
32684         com.google.protobuf.Descriptors.MethodDescriptor method,
32685         com.google.protobuf.RpcController controller,
32686         com.google.protobuf.Message request,
32687         com.google.protobuf.RpcCallback<
32688           com.google.protobuf.Message> done) {
32689       if (method.getService() != getDescriptor()) {
32690         throw new java.lang.IllegalArgumentException(
32691           "Service.callMethod() given method descriptor for wrong " +
32692           "service type.");
32693       }
32694       switch(method.getIndex()) {
32695         case 0:
32696           this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request,
32697             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse>specializeCallback(
32698               done));
32699           return;
32700         case 1:
32701           this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request,
32702             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse>specializeCallback(
32703               done));
32704           return;
32705         case 2:
32706           this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request,
32707             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse>specializeCallback(
32708               done));
32709           return;
32710         case 3:
32711           this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request,
32712             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse>specializeCallback(
32713               done));
32714           return;
32715         case 4:
32716           this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
32717             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
32718               done));
32719           return;
32720         case 5:
32721           this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
32722             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
32723               done));
32724           return;
32725         case 6:
32726           this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
32727             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
32728               done));
32729           return;
32730         default:
32731           throw new java.lang.AssertionError("Can't get here.");
32732       }
32733     }
32734 
32735     public final com.google.protobuf.Message
32736         getRequestPrototype(
32737         com.google.protobuf.Descriptors.MethodDescriptor method) {
32738       if (method.getService() != getDescriptor()) {
32739         throw new java.lang.IllegalArgumentException(
32740           "Service.getRequestPrototype() given method " +
32741           "descriptor for wrong service type.");
32742       }
32743       switch(method.getIndex()) {
32744         case 0:
32745           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
32746         case 1:
32747           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
32748         case 2:
32749           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
32750         case 3:
32751           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
32752         case 4:
32753           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
32754         case 5:
32755           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
32756         case 6:
32757           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
32758         default:
32759           throw new java.lang.AssertionError("Can't get here.");
32760       }
32761     }
32762 
32763     public final com.google.protobuf.Message
32764         getResponsePrototype(
32765         com.google.protobuf.Descriptors.MethodDescriptor method) {
32766       if (method.getService() != getDescriptor()) {
32767         throw new java.lang.IllegalArgumentException(
32768           "Service.getResponsePrototype() given method " +
32769           "descriptor for wrong service type.");
32770       }
32771       switch(method.getIndex()) {
32772         case 0:
32773           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
32774         case 1:
32775           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
32776         case 2:
32777           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
32778         case 3:
32779           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
32780         case 4:
32781           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
32782         case 5:
32783           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
32784         case 6:
32785           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
32786         default:
32787           throw new java.lang.AssertionError("Can't get here.");
32788       }
32789     }
32790 
32791     public static Stub newStub(
32792         com.google.protobuf.RpcChannel channel) {
32793       return new Stub(channel);
32794     }
32795 
32796     public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface {
32797       private Stub(com.google.protobuf.RpcChannel channel) {
32798         this.channel = channel;
32799       }
32800 
32801       private final com.google.protobuf.RpcChannel channel;
32802 
32803       public com.google.protobuf.RpcChannel getChannel() {
32804         return channel;
32805       }
32806 
32807       public  void get(
32808           com.google.protobuf.RpcController controller,
32809           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
32810           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
32811         channel.callMethod(
32812           getDescriptor().getMethods().get(0),
32813           controller,
32814           request,
32815           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(),
32816           com.google.protobuf.RpcUtil.generalizeCallback(
32817             done,
32818             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class,
32819             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()));
32820       }
32821 
32822       public  void mutate(
32823           com.google.protobuf.RpcController controller,
32824           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
32825           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
32826         channel.callMethod(
32827           getDescriptor().getMethods().get(1),
32828           controller,
32829           request,
32830           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(),
32831           com.google.protobuf.RpcUtil.generalizeCallback(
32832             done,
32833             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class,
32834             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()));
32835       }
32836 
32837       public  void scan(
32838           com.google.protobuf.RpcController controller,
32839           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
32840           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
32841         channel.callMethod(
32842           getDescriptor().getMethods().get(2),
32843           controller,
32844           request,
32845           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(),
32846           com.google.protobuf.RpcUtil.generalizeCallback(
32847             done,
32848             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class,
32849             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()));
32850       }
32851 
32852       public  void bulkLoadHFile(
32853           com.google.protobuf.RpcController controller,
32854           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
32855           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
32856         channel.callMethod(
32857           getDescriptor().getMethods().get(3),
32858           controller,
32859           request,
32860           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(),
32861           com.google.protobuf.RpcUtil.generalizeCallback(
32862             done,
32863             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class,
32864             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()));
32865       }
32866 
32867       public  void execService(
32868           com.google.protobuf.RpcController controller,
32869           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32870           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
32871         channel.callMethod(
32872           getDescriptor().getMethods().get(4),
32873           controller,
32874           request,
32875           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
32876           com.google.protobuf.RpcUtil.generalizeCallback(
32877             done,
32878             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
32879             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
32880       }
32881 
32882       public  void execRegionServerService(
32883           com.google.protobuf.RpcController controller,
32884           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
32885           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
32886         channel.callMethod(
32887           getDescriptor().getMethods().get(5),
32888           controller,
32889           request,
32890           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
32891           com.google.protobuf.RpcUtil.generalizeCallback(
32892             done,
32893             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
32894             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
32895       }
32896 
32897       public  void multi(
32898           com.google.protobuf.RpcController controller,
32899           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
32900           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
32901         channel.callMethod(
32902           getDescriptor().getMethods().get(6),
32903           controller,
32904           request,
32905           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
32906           com.google.protobuf.RpcUtil.generalizeCallback(
32907             done,
32908             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class,
32909             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()));
32910       }
32911     }
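    // Editorial note (not generated code): an illustrative, non-authoritative sketch
    // of using the asynchronous Stub above. "channel", "controller", and "getRequest"
    // are assumed to be supplied by the caller; each stub method forwards to
    // channel.callMethod and delivers the typed response through the callback.
    //
    //   ClientProtos.ClientService.Stub stub =
    //       ClientProtos.ClientService.newStub(channel);
    //   stub.get(controller, getRequest,
    //       new com.google.protobuf.RpcCallback<ClientProtos.GetResponse>() {
    //         public void run(ClientProtos.GetResponse response) {
    //           // handle the asynchronous GetResponse here
    //         }
    //       });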
32912 
32913     public static BlockingInterface newBlockingStub(
32914         com.google.protobuf.BlockingRpcChannel channel) {
32915       return new BlockingStub(channel);
32916     }
32917 
32918     public interface BlockingInterface {
32919       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
32920           com.google.protobuf.RpcController controller,
32921           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
32922           throws com.google.protobuf.ServiceException;
32923 
32924       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
32925           com.google.protobuf.RpcController controller,
32926           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
32927           throws com.google.protobuf.ServiceException;
32928 
32929       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
32930           com.google.protobuf.RpcController controller,
32931           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
32932           throws com.google.protobuf.ServiceException;
32933 
32934       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
32935           com.google.protobuf.RpcController controller,
32936           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
32937           throws com.google.protobuf.ServiceException;
32938 
32939       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
32940           com.google.protobuf.RpcController controller,
32941           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
32942           throws com.google.protobuf.ServiceException;
32943 
32944       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
32945           com.google.protobuf.RpcController controller,
32946           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
32947           throws com.google.protobuf.ServiceException;
32948 
32949       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
32950           com.google.protobuf.RpcController controller,
32951           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
32952           throws com.google.protobuf.ServiceException;
32953     }
32954 
32955     private static final class BlockingStub implements BlockingInterface {
32956       private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
32957         this.channel = channel;
32958       }
32959 
32960       private final com.google.protobuf.BlockingRpcChannel channel;
32961 
32962       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
32963           com.google.protobuf.RpcController controller,
32964           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
32965           throws com.google.protobuf.ServiceException {
32966         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod(
32967           getDescriptor().getMethods().get(0),
32968           controller,
32969           request,
32970           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance());
32971       }
32972 
32973 
32974       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
32975           com.google.protobuf.RpcController controller,
32976           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
32977           throws com.google.protobuf.ServiceException {
32978         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod(
32979           getDescriptor().getMethods().get(1),
32980           controller,
32981           request,
32982           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance());
32983       }
32984 
32985 
32986       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
32987           com.google.protobuf.RpcController controller,
32988           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
32989           throws com.google.protobuf.ServiceException {
32990         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod(
32991           getDescriptor().getMethods().get(2),
32992           controller,
32993           request,
32994           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance());
32995       }
32996 
32997 
32998       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
32999           com.google.protobuf.RpcController controller,
33000           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
33001           throws com.google.protobuf.ServiceException {
33002         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod(
33003           getDescriptor().getMethods().get(3),
33004           controller,
33005           request,
33006           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance());
33007       }
33008 
33009 
33010       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
33011           com.google.protobuf.RpcController controller,
33012           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
33013           throws com.google.protobuf.ServiceException {
33014         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
33015           getDescriptor().getMethods().get(4),
33016           controller,
33017           request,
33018           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
33019       }
33020 
33021 
33022       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
33023           com.google.protobuf.RpcController controller,
33024           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
33025           throws com.google.protobuf.ServiceException {
33026         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
33027           getDescriptor().getMethods().get(5),
33028           controller,
33029           request,
33030           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
33031       }
33032 
33033 
33034       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
33035           com.google.protobuf.RpcController controller,
33036           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
33037           throws com.google.protobuf.ServiceException {
33038         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
33039           getDescriptor().getMethods().get(6),
33040           controller,
33041           request,
33042           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
33043       }
33044 
33045     }
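    // Editorial note (not generated code): an illustrative sketch of the blocking
    // client path shown above. "channel" (a com.google.protobuf.BlockingRpcChannel),
    // "controller", and "getRequest" are assumed to exist; each call maps to one
    // channel.callBlockingMethod invocation and returns the typed response directly,
    // throwing ServiceException on RPC failure.
    //
    //   ClientProtos.ClientService.BlockingInterface client =
    //       ClientProtos.ClientService.newBlockingStub(channel);
    //   ClientProtos.GetResponse response = client.get(controller, getRequest);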
33046 
33047     // @@protoc_insertion_point(class_scope:hbase.pb.ClientService)
33048   }
33049 
33050   private static com.google.protobuf.Descriptors.Descriptor
33051     internal_static_hbase_pb_Authorizations_descriptor;
33052   private static
33053     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33054       internal_static_hbase_pb_Authorizations_fieldAccessorTable;
33055   private static com.google.protobuf.Descriptors.Descriptor
33056     internal_static_hbase_pb_CellVisibility_descriptor;
33057   private static
33058     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33059       internal_static_hbase_pb_CellVisibility_fieldAccessorTable;
33060   private static com.google.protobuf.Descriptors.Descriptor
33061     internal_static_hbase_pb_Column_descriptor;
33062   private static
33063     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33064       internal_static_hbase_pb_Column_fieldAccessorTable;
33065   private static com.google.protobuf.Descriptors.Descriptor
33066     internal_static_hbase_pb_Get_descriptor;
33067   private static
33068     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33069       internal_static_hbase_pb_Get_fieldAccessorTable;
33070   private static com.google.protobuf.Descriptors.Descriptor
33071     internal_static_hbase_pb_Result_descriptor;
33072   private static
33073     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33074       internal_static_hbase_pb_Result_fieldAccessorTable;
33075   private static com.google.protobuf.Descriptors.Descriptor
33076     internal_static_hbase_pb_GetRequest_descriptor;
33077   private static
33078     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33079       internal_static_hbase_pb_GetRequest_fieldAccessorTable;
33080   private static com.google.protobuf.Descriptors.Descriptor
33081     internal_static_hbase_pb_GetResponse_descriptor;
33082   private static
33083     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33084       internal_static_hbase_pb_GetResponse_fieldAccessorTable;
33085   private static com.google.protobuf.Descriptors.Descriptor
33086     internal_static_hbase_pb_Condition_descriptor;
33087   private static
33088     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33089       internal_static_hbase_pb_Condition_fieldAccessorTable;
33090   private static com.google.protobuf.Descriptors.Descriptor
33091     internal_static_hbase_pb_MutationProto_descriptor;
33092   private static
33093     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33094       internal_static_hbase_pb_MutationProto_fieldAccessorTable;
33095   private static com.google.protobuf.Descriptors.Descriptor
33096     internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
33097   private static
33098     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33099       internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable;
33100   private static com.google.protobuf.Descriptors.Descriptor
33101     internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
33102   private static
33103     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33104       internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable;
33105   private static com.google.protobuf.Descriptors.Descriptor
33106     internal_static_hbase_pb_MutateRequest_descriptor;
33107   private static
33108     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33109       internal_static_hbase_pb_MutateRequest_fieldAccessorTable;
33110   private static com.google.protobuf.Descriptors.Descriptor
33111     internal_static_hbase_pb_MutateResponse_descriptor;
33112   private static
33113     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33114       internal_static_hbase_pb_MutateResponse_fieldAccessorTable;
33115   private static com.google.protobuf.Descriptors.Descriptor
33116     internal_static_hbase_pb_Scan_descriptor;
33117   private static
33118     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33119       internal_static_hbase_pb_Scan_fieldAccessorTable;
33120   private static com.google.protobuf.Descriptors.Descriptor
33121     internal_static_hbase_pb_ScanRequest_descriptor;
33122   private static
33123     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33124       internal_static_hbase_pb_ScanRequest_fieldAccessorTable;
33125   private static com.google.protobuf.Descriptors.Descriptor
33126     internal_static_hbase_pb_ScanResponse_descriptor;
33127   private static
33128     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33129       internal_static_hbase_pb_ScanResponse_fieldAccessorTable;
33130   private static com.google.protobuf.Descriptors.Descriptor
33131     internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
33132   private static
33133     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33134       internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable;
33135   private static com.google.protobuf.Descriptors.Descriptor
33136     internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
33137   private static
33138     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33139       internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable;
33140   private static com.google.protobuf.Descriptors.Descriptor
33141     internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
33142   private static
33143     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33144       internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable;
33145   private static com.google.protobuf.Descriptors.Descriptor
33146     internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
33147   private static
33148     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33149       internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable;
33150   private static com.google.protobuf.Descriptors.Descriptor
33151     internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
33152   private static
33153     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33154       internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable;
33155   private static com.google.protobuf.Descriptors.Descriptor
33156     internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
33157   private static
33158     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33159       internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable;
33160   private static com.google.protobuf.Descriptors.Descriptor
33161     internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
33162   private static
33163     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33164       internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable;
33165   private static com.google.protobuf.Descriptors.Descriptor
33166     internal_static_hbase_pb_Action_descriptor;
33167   private static
33168     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33169       internal_static_hbase_pb_Action_fieldAccessorTable;
33170   private static com.google.protobuf.Descriptors.Descriptor
33171     internal_static_hbase_pb_RegionAction_descriptor;
33172   private static
33173     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33174       internal_static_hbase_pb_RegionAction_fieldAccessorTable;
33175   private static com.google.protobuf.Descriptors.Descriptor
33176     internal_static_hbase_pb_RegionLoadStats_descriptor;
33177   private static
33178     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33179       internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable;
33180   private static com.google.protobuf.Descriptors.Descriptor
33181     internal_static_hbase_pb_ResultOrException_descriptor;
33182   private static
33183     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33184       internal_static_hbase_pb_ResultOrException_fieldAccessorTable;
33185   private static com.google.protobuf.Descriptors.Descriptor
33186     internal_static_hbase_pb_RegionActionResult_descriptor;
33187   private static
33188     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33189       internal_static_hbase_pb_RegionActionResult_fieldAccessorTable;
33190   private static com.google.protobuf.Descriptors.Descriptor
33191     internal_static_hbase_pb_MultiRequest_descriptor;
33192   private static
33193     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33194       internal_static_hbase_pb_MultiRequest_fieldAccessorTable;
33195   private static com.google.protobuf.Descriptors.Descriptor
33196     internal_static_hbase_pb_MultiResponse_descriptor;
33197   private static
33198     com.google.protobuf.GeneratedMessage.FieldAccessorTable
33199       internal_static_hbase_pb_MultiResponse_fieldAccessorTable;
33200 
33201   public static com.google.protobuf.Descriptors.FileDescriptor
33202       getDescriptor() {
33203     return descriptor;
33204   }
33205   private static com.google.protobuf.Descriptors.FileDescriptor
33206       descriptor;
33207   static {
33208     java.lang.String[] descriptorData = {
33209       "\n\014Client.proto\022\010hbase.pb\032\013HBase.proto\032\014F" +
33210       "ilter.proto\032\nCell.proto\032\020Comparator.prot" +
33211       "o\032\017MapReduce.proto\"\037\n\016Authorizations\022\r\n\005" +
33212       "label\030\001 \003(\t\"$\n\016CellVisibility\022\022\n\nexpress" +
33213       "ion\030\001 \002(\t\"+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tq" +
33214       "ualifier\030\002 \003(\014\"\336\002\n\003Get\022\013\n\003row\030\001 \002(\014\022 \n\006c" +
33215       "olumn\030\002 \003(\0132\020.hbase.pb.Column\022*\n\tattribu" +
33216       "te\030\003 \003(\0132\027.hbase.pb.NameBytesPair\022 \n\006fil" +
33217       "ter\030\004 \001(\0132\020.hbase.pb.Filter\022\'\n\ntime_rang" +
33218       "e\030\005 \001(\0132\023.hbase.pb.TimeRange\022\027\n\014max_vers",
33219       "ions\030\006 \001(\r:\0011\022\032\n\014cache_blocks\030\007 \001(\010:\004tru" +
33220       "e\022\023\n\013store_limit\030\010 \001(\r\022\024\n\014store_offset\030\t" +
33221       " \001(\r\022\035\n\016existence_only\030\n \001(\010:\005false\0222\n\013c" +
33222       "onsistency\030\014 \001(\0162\025.hbase.pb.Consistency:" +
33223       "\006STRONG\"\203\001\n\006Result\022\034\n\004cell\030\001 \003(\0132\016.hbase" +
33224       ".pb.Cell\022\035\n\025associated_cell_count\030\002 \001(\005\022" +
33225       "\016\n\006exists\030\003 \001(\010\022\024\n\005stale\030\004 \001(\010:\005false\022\026\n" +
33226       "\007partial\030\005 \001(\010:\005false\"S\n\nGetRequest\022)\n\006r" +
33227       "egion\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022\032" +
33228       "\n\003get\030\002 \002(\0132\r.hbase.pb.Get\"/\n\013GetRespons",
33229       "e\022 \n\006result\030\001 \001(\0132\020.hbase.pb.Result\"\222\001\n\t" +
33230       "Condition\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021" +
33231       "\n\tqualifier\030\003 \002(\014\022+\n\014compare_type\030\004 \002(\0162" +
33232       "\025.hbase.pb.CompareType\022(\n\ncomparator\030\005 \002" +
33233       "(\0132\024.hbase.pb.Comparator\"\364\006\n\rMutationPro" +
33234       "to\022\013\n\003row\030\001 \001(\014\0229\n\013mutate_type\030\002 \001(\0162$.h" +
33235       "base.pb.MutationProto.MutationType\0229\n\014co" +
33236       "lumn_value\030\003 \003(\0132#.hbase.pb.MutationProt" +
33237       "o.ColumnValue\022\021\n\ttimestamp\030\004 \001(\004\022*\n\tattr" +
33238       "ibute\030\005 \003(\0132\027.hbase.pb.NameBytesPair\022C\n\n",
33239       "durability\030\006 \001(\0162\".hbase.pb.MutationProt" +
33240       "o.Durability:\013USE_DEFAULT\022\'\n\ntime_range\030" +
33241       "\007 \001(\0132\023.hbase.pb.TimeRange\022\035\n\025associated" +
33242       "_cell_count\030\010 \001(\005\022\r\n\005nonce\030\t \001(\004\032\371\001\n\013Col" +
33243       "umnValue\022\016\n\006family\030\001 \002(\014\022K\n\017qualifier_va" +
33244       "lue\030\002 \003(\01322.hbase.pb.MutationProto.Colum" +
33245       "nValue.QualifierValue\032\214\001\n\016QualifierValue" +
33246       "\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002 \001(\014\022\021\n\tti" +
33247       "mestamp\030\003 \001(\004\0227\n\013delete_type\030\004 \001(\0162\".hba" +
33248       "se.pb.MutationProto.DeleteType\022\014\n\004tags\030\005",
33249       " \001(\014\"W\n\nDurability\022\017\n\013USE_DEFAULT\020\000\022\014\n\010S" +
33250       "KIP_WAL\020\001\022\r\n\tASYNC_WAL\020\002\022\014\n\010SYNC_WAL\020\003\022\r" +
33251       "\n\tFSYNC_WAL\020\004\">\n\014MutationType\022\n\n\006APPEND\020" +
33252       "\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\"p\n" +
33253       "\nDeleteType\022\026\n\022DELETE_ONE_VERSION\020\000\022\034\n\030D" +
33254       "ELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rDELETE_FAMI" +
33255       "LY\020\002\022\031\n\025DELETE_FAMILY_VERSION\020\003\"\242\001\n\rMuta" +
33256       "teRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.Reg" +
33257       "ionSpecifier\022)\n\010mutation\030\002 \002(\0132\027.hbase.p" +
33258       "b.MutationProto\022&\n\tcondition\030\003 \001(\0132\023.hba",
33259       "se.pb.Condition\022\023\n\013nonce_group\030\004 \001(\004\"E\n\016" +
33260       "MutateResponse\022 \n\006result\030\001 \001(\0132\020.hbase.p" +
33261       "b.Result\022\021\n\tprocessed\030\002 \001(\010\"\205\004\n\004Scan\022 \n\006" +
33262       "column\030\001 \003(\0132\020.hbase.pb.Column\022*\n\tattrib" +
33263       "ute\030\002 \003(\0132\027.hbase.pb.NameBytesPair\022\021\n\tst" +
33264       "art_row\030\003 \001(\014\022\020\n\010stop_row\030\004 \001(\014\022 \n\006filte" +
33265       "r\030\005 \001(\0132\020.hbase.pb.Filter\022\'\n\ntime_range\030" +
33266       "\006 \001(\0132\023.hbase.pb.TimeRange\022\027\n\014max_versio" +
33267       "ns\030\007 \001(\r:\0011\022\032\n\014cache_blocks\030\010 \001(\010:\004true\022" +
33268       "\022\n\nbatch_size\030\t \001(\r\022\027\n\017max_result_size\030\n",
33269       " \001(\004\022\023\n\013store_limit\030\013 \001(\r\022\024\n\014store_offse" +
33270       "t\030\014 \001(\r\022&\n\036load_column_families_on_deman" +
33271       "d\030\r \001(\010\022\r\n\005small\030\016 \001(\010\022\027\n\010reversed\030\017 \001(\010" +
33272       ":\005false\0222\n\013consistency\030\020 \001(\0162\025.hbase.pb." +
33273       "Consistency:\006STRONG\022\017\n\007caching\030\021 \001(\r\022\035\n\025" +
33274       "allow_partial_results\030\022 \001(\010\"\220\002\n\013ScanRequ" +
33275       "est\022)\n\006region\030\001 \001(\0132\031.hbase.pb.RegionSpe" +
33276       "cifier\022\034\n\004scan\030\002 \001(\0132\016.hbase.pb.Scan\022\022\n\n" +
33277       "scanner_id\030\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r" +
33278       "\022\025\n\rclose_scanner\030\005 \001(\010\022\025\n\rnext_call_seq",
33279       "\030\006 \001(\004\022\037\n\027client_handles_partials\030\007 \001(\010\022" +
33280       "!\n\031client_handles_heartbeats\030\010 \001(\010\022\032\n\022tr" +
33281       "ack_scan_metrics\030\t \001(\010\"\232\002\n\014ScanResponse\022" +
33282       "\030\n\020cells_per_result\030\001 \003(\r\022\022\n\nscanner_id\030" +
33283       "\002 \001(\004\022\024\n\014more_results\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r" +
33284       "\022!\n\007results\030\005 \003(\0132\020.hbase.pb.Result\022\r\n\005s" +
33285       "tale\030\006 \001(\010\022\037\n\027partial_flag_per_result\030\007 " +
33286       "\003(\010\022\036\n\026more_results_in_region\030\010 \001(\010\022\031\n\021h" +
33287       "eartbeat_message\030\t \001(\010\022+\n\014scan_metrics\030\n" +
33288       " \001(\0132\025.hbase.pb.ScanMetrics\"\305\001\n\024BulkLoad",
33289       "HFileRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb." +
33290       "RegionSpecifier\022>\n\013family_path\030\002 \003(\0132).h" +
33291       "base.pb.BulkLoadHFileRequest.FamilyPath\022" +
33292       "\026\n\016assign_seq_num\030\003 \001(\010\032*\n\nFamilyPath\022\016\n" +
33293       "\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadH" +
33294       "FileResponse\022\016\n\006loaded\030\001 \002(\010\"a\n\026Coproces" +
33295       "sorServiceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_n" +
33296       "ame\030\002 \002(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007reques" +
33297       "t\030\004 \002(\014\"B\n\030CoprocessorServiceResult\022&\n\005v" +
33298       "alue\030\001 \001(\0132\027.hbase.pb.NameBytesPair\"v\n\031C",
33299       "oprocessorServiceRequest\022)\n\006region\030\001 \002(\013" +
33300       "2\031.hbase.pb.RegionSpecifier\022.\n\004call\030\002 \002(" +
33301       "\0132 .hbase.pb.CoprocessorServiceCall\"o\n\032C" +
33302       "oprocessorServiceResponse\022)\n\006region\030\001 \002(" +
33303       "\0132\031.hbase.pb.RegionSpecifier\022&\n\005value\030\002 " +
33304       "\002(\0132\027.hbase.pb.NameBytesPair\"\226\001\n\006Action\022" +
33305       "\r\n\005index\030\001 \001(\r\022)\n\010mutation\030\002 \001(\0132\027.hbase" +
33306       ".pb.MutationProto\022\032\n\003get\030\003 \001(\0132\r.hbase.p" +
33307       "b.Get\0226\n\014service_call\030\004 \001(\0132 .hbase.pb.C" +
33308       "oprocessorServiceCall\"k\n\014RegionAction\022)\n",
33309       "\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecifier" +
33310       "\022\016\n\006atomic\030\002 \001(\010\022 \n\006action\030\003 \003(\0132\020.hbase" +
33311       ".pb.Action\"D\n\017RegionLoadStats\022\027\n\014memstor" +
33312       "eLoad\030\001 \001(\005:\0010\022\030\n\rheapOccupancy\030\002 \001(\005:\0010" +
33313       "\"\332\001\n\021ResultOrException\022\r\n\005index\030\001 \001(\r\022 \n" +
33314       "\006result\030\002 \001(\0132\020.hbase.pb.Result\022*\n\texcep" +
33315       "tion\030\003 \001(\0132\027.hbase.pb.NameBytesPair\022:\n\016s" +
33316       "ervice_result\030\004 \001(\0132\".hbase.pb.Coprocess" +
33317       "orServiceResult\022,\n\tloadStats\030\005 \001(\0132\031.hba" +
33318       "se.pb.RegionLoadStats\"x\n\022RegionActionRes",
33319       "ult\0226\n\021resultOrException\030\001 \003(\0132\033.hbase.p" +
33320       "b.ResultOrException\022*\n\texception\030\002 \001(\0132\027" +
33321       ".hbase.pb.NameBytesPair\"x\n\014MultiRequest\022" +
33322       ",\n\014regionAction\030\001 \003(\0132\026.hbase.pb.RegionA" +
33323       "ction\022\022\n\nnonceGroup\030\002 \001(\004\022&\n\tcondition\030\003" +
33324       " \001(\0132\023.hbase.pb.Condition\"\\\n\rMultiRespon" +
33325       "se\0228\n\022regionActionResult\030\001 \003(\0132\034.hbase.p" +
33326       "b.RegionActionResult\022\021\n\tprocessed\030\002 \001(\010*" +
33327       "\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\001" +
33328       "2\203\004\n\rClientService\0222\n\003Get\022\024.hbase.pb.Get",
33329       "Request\032\025.hbase.pb.GetResponse\022;\n\006Mutate" +
33330       "\022\027.hbase.pb.MutateRequest\032\030.hbase.pb.Mut" +
33331       "ateResponse\0225\n\004Scan\022\025.hbase.pb.ScanReque" +
33332       "st\032\026.hbase.pb.ScanResponse\022P\n\rBulkLoadHF" +
33333       "ile\022\036.hbase.pb.BulkLoadHFileRequest\032\037.hb" +
33334       "ase.pb.BulkLoadHFileResponse\022X\n\013ExecServ" +
33335       "ice\022#.hbase.pb.CoprocessorServiceRequest" +
33336       "\032$.hbase.pb.CoprocessorServiceResponse\022d" +
33337       "\n\027ExecRegionServerService\022#.hbase.pb.Cop" +
33338       "rocessorServiceRequest\032$.hbase.pb.Coproc",
33339       "essorServiceResponse\0228\n\005Multi\022\026.hbase.pb" +
33340       ".MultiRequest\032\027.hbase.pb.MultiResponseBB" +
33341       "\n*org.apache.hadoop.hbase.protobuf.gener" +
33342       "atedB\014ClientProtosH\001\210\001\001\240\001\001"
33343     };
33344     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
33345       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
33346         public com.google.protobuf.ExtensionRegistry assignDescriptors(
33347             com.google.protobuf.Descriptors.FileDescriptor root) {
33348           descriptor = root;
33349           internal_static_hbase_pb_Authorizations_descriptor =
33350             getDescriptor().getMessageTypes().get(0);
33351           internal_static_hbase_pb_Authorizations_fieldAccessorTable = new
33352             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33353               internal_static_hbase_pb_Authorizations_descriptor,
33354               new java.lang.String[] { "Label", });
33355           internal_static_hbase_pb_CellVisibility_descriptor =
33356             getDescriptor().getMessageTypes().get(1);
33357           internal_static_hbase_pb_CellVisibility_fieldAccessorTable = new
33358             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33359               internal_static_hbase_pb_CellVisibility_descriptor,
33360               new java.lang.String[] { "Expression", });
33361           internal_static_hbase_pb_Column_descriptor =
33362             getDescriptor().getMessageTypes().get(2);
33363           internal_static_hbase_pb_Column_fieldAccessorTable = new
33364             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33365               internal_static_hbase_pb_Column_descriptor,
33366               new java.lang.String[] { "Family", "Qualifier", });
33367           internal_static_hbase_pb_Get_descriptor =
33368             getDescriptor().getMessageTypes().get(3);
33369           internal_static_hbase_pb_Get_fieldAccessorTable = new
33370             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33371               internal_static_hbase_pb_Get_descriptor,
33372               new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "Consistency", });
33373           internal_static_hbase_pb_Result_descriptor =
33374             getDescriptor().getMessageTypes().get(4);
33375           internal_static_hbase_pb_Result_fieldAccessorTable = new
33376             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33377               internal_static_hbase_pb_Result_descriptor,
33378               new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", });
33379           internal_static_hbase_pb_GetRequest_descriptor =
33380             getDescriptor().getMessageTypes().get(5);
33381           internal_static_hbase_pb_GetRequest_fieldAccessorTable = new
33382             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33383               internal_static_hbase_pb_GetRequest_descriptor,
33384               new java.lang.String[] { "Region", "Get", });
33385           internal_static_hbase_pb_GetResponse_descriptor =
33386             getDescriptor().getMessageTypes().get(6);
33387           internal_static_hbase_pb_GetResponse_fieldAccessorTable = new
33388             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33389               internal_static_hbase_pb_GetResponse_descriptor,
33390               new java.lang.String[] { "Result", });
33391           internal_static_hbase_pb_Condition_descriptor =
33392             getDescriptor().getMessageTypes().get(7);
33393           internal_static_hbase_pb_Condition_fieldAccessorTable = new
33394             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33395               internal_static_hbase_pb_Condition_descriptor,
33396               new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", });
33397           internal_static_hbase_pb_MutationProto_descriptor =
33398             getDescriptor().getMessageTypes().get(8);
33399           internal_static_hbase_pb_MutationProto_fieldAccessorTable = new
33400             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33401               internal_static_hbase_pb_MutationProto_descriptor,
33402               new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", });
33403           internal_static_hbase_pb_MutationProto_ColumnValue_descriptor =
33404             internal_static_hbase_pb_MutationProto_descriptor.getNestedTypes().get(0);
33405           internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable = new
33406             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33407               internal_static_hbase_pb_MutationProto_ColumnValue_descriptor,
33408               new java.lang.String[] { "Family", "QualifierValue", });
33409           internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor =
33410             internal_static_hbase_pb_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0);
33411           internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new
33412             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33413               internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor,
33414               new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", });
33415           internal_static_hbase_pb_MutateRequest_descriptor =
33416             getDescriptor().getMessageTypes().get(9);
33417           internal_static_hbase_pb_MutateRequest_fieldAccessorTable = new
33418             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33419               internal_static_hbase_pb_MutateRequest_descriptor,
33420               new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", });
33421           internal_static_hbase_pb_MutateResponse_descriptor =
33422             getDescriptor().getMessageTypes().get(10);
33423           internal_static_hbase_pb_MutateResponse_fieldAccessorTable = new
33424             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33425               internal_static_hbase_pb_MutateResponse_descriptor,
33426               new java.lang.String[] { "Result", "Processed", });
33427           internal_static_hbase_pb_Scan_descriptor =
33428             getDescriptor().getMessageTypes().get(11);
33429           internal_static_hbase_pb_Scan_fieldAccessorTable = new
33430             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33431               internal_static_hbase_pb_Scan_descriptor,
33432               new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", "AllowPartialResults", });
33433           internal_static_hbase_pb_ScanRequest_descriptor =
33434             getDescriptor().getMessageTypes().get(12);
33435           internal_static_hbase_pb_ScanRequest_fieldAccessorTable = new
33436             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33437               internal_static_hbase_pb_ScanRequest_descriptor,
33438               new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", "TrackScanMetrics", });
33439           internal_static_hbase_pb_ScanResponse_descriptor =
33440             getDescriptor().getMessageTypes().get(13);
33441           internal_static_hbase_pb_ScanResponse_fieldAccessorTable = new
33442             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33443               internal_static_hbase_pb_ScanResponse_descriptor,
33444               new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", "ScanMetrics", });
33445           internal_static_hbase_pb_BulkLoadHFileRequest_descriptor =
33446             getDescriptor().getMessageTypes().get(14);
33447           internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable = new
33448             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33449               internal_static_hbase_pb_BulkLoadHFileRequest_descriptor,
33450               new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", });
33451           internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor =
33452             internal_static_hbase_pb_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0);
33453           internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new
33454             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33455               internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor,
33456               new java.lang.String[] { "Family", "Path", });
33457           internal_static_hbase_pb_BulkLoadHFileResponse_descriptor =
33458             getDescriptor().getMessageTypes().get(15);
33459           internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable = new
33460             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33461               internal_static_hbase_pb_BulkLoadHFileResponse_descriptor,
33462               new java.lang.String[] { "Loaded", });
33463           internal_static_hbase_pb_CoprocessorServiceCall_descriptor =
33464             getDescriptor().getMessageTypes().get(16);
33465           internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable = new
33466             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33467               internal_static_hbase_pb_CoprocessorServiceCall_descriptor,
33468               new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", });
33469           internal_static_hbase_pb_CoprocessorServiceResult_descriptor =
33470             getDescriptor().getMessageTypes().get(17);
33471           internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable = new
33472             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33473               internal_static_hbase_pb_CoprocessorServiceResult_descriptor,
33474               new java.lang.String[] { "Value", });
33475           internal_static_hbase_pb_CoprocessorServiceRequest_descriptor =
33476             getDescriptor().getMessageTypes().get(18);
33477           internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable = new
33478             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33479               internal_static_hbase_pb_CoprocessorServiceRequest_descriptor,
33480               new java.lang.String[] { "Region", "Call", });
33481           internal_static_hbase_pb_CoprocessorServiceResponse_descriptor =
33482             getDescriptor().getMessageTypes().get(19);
33483           internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable = new
33484             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33485               internal_static_hbase_pb_CoprocessorServiceResponse_descriptor,
33486               new java.lang.String[] { "Region", "Value", });
33487           internal_static_hbase_pb_Action_descriptor =
33488             getDescriptor().getMessageTypes().get(20);
33489           internal_static_hbase_pb_Action_fieldAccessorTable = new
33490             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33491               internal_static_hbase_pb_Action_descriptor,
33492               new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", });
33493           internal_static_hbase_pb_RegionAction_descriptor =
33494             getDescriptor().getMessageTypes().get(21);
33495           internal_static_hbase_pb_RegionAction_fieldAccessorTable = new
33496             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33497               internal_static_hbase_pb_RegionAction_descriptor,
33498               new java.lang.String[] { "Region", "Atomic", "Action", });
33499           internal_static_hbase_pb_RegionLoadStats_descriptor =
33500             getDescriptor().getMessageTypes().get(22);
33501           internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable = new
33502             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33503               internal_static_hbase_pb_RegionLoadStats_descriptor,
33504               new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", });
33505           internal_static_hbase_pb_ResultOrException_descriptor =
33506             getDescriptor().getMessageTypes().get(23);
33507           internal_static_hbase_pb_ResultOrException_fieldAccessorTable = new
33508             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33509               internal_static_hbase_pb_ResultOrException_descriptor,
33510               new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", });
33511           internal_static_hbase_pb_RegionActionResult_descriptor =
33512             getDescriptor().getMessageTypes().get(24);
33513           internal_static_hbase_pb_RegionActionResult_fieldAccessorTable = new
33514             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33515               internal_static_hbase_pb_RegionActionResult_descriptor,
33516               new java.lang.String[] { "ResultOrException", "Exception", });
33517           internal_static_hbase_pb_MultiRequest_descriptor =
33518             getDescriptor().getMessageTypes().get(25);
33519           internal_static_hbase_pb_MultiRequest_fieldAccessorTable = new
33520             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33521               internal_static_hbase_pb_MultiRequest_descriptor,
33522               new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", });
33523           internal_static_hbase_pb_MultiResponse_descriptor =
33524             getDescriptor().getMessageTypes().get(26);
33525           internal_static_hbase_pb_MultiResponse_fieldAccessorTable = new
33526             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33527               internal_static_hbase_pb_MultiResponse_descriptor,
33528               new java.lang.String[] { "RegionActionResult", "Processed", });
33529           return null;
33530         }
33531       };
33532     com.google.protobuf.Descriptors.FileDescriptor
33533       .internalBuildGeneratedFileFrom(descriptorData,
33534         new com.google.protobuf.Descriptors.FileDescriptor[] {
33535           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
33536           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.getDescriptor(),
33537           org.apache.hadoop.hbase.protobuf.generated.CellProtos.getDescriptor(),
33538           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(),
33539           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.getDescriptor(),
33540         }, assigner);
33541   }
33542 
33543   // @@protoc_insertion_point(outer_class_scope)
33544 }
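
The static block above does two things: it registers an assigner that binds each generated message descriptor (ScanRequest, ScanResponse, BulkLoadHFileRequest, and so on) to a reflective FieldAccessorTable, and it then builds the Client.proto file descriptor against its imported dependencies (HBaseProtos, FilterProtos, CellProtos, ComparatorProtos, MapReduceProtos). As a minimal, hypothetical sketch — not part of the generated sources, and the class name ClientProtosDescriptorDemo is invented for illustration — the snippet below reads one of those descriptors back through the standard protobuf reflection API.

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class ClientProtosDescriptorDemo {
  public static void main(String[] args) {
    // Touching the generated class runs the static initializer shown above,
    // which builds the Client.proto file descriptor and wires every
    // FieldAccessorTable before any descriptor is handed out.
    Descriptors.Descriptor scanRequest =
        ClientProtos.ScanRequest.getDescriptor();

    // Walk the proto-level fields of ScanRequest; they correspond to the
    // camel-cased names ("Region", "Scan", "ScannerId", ...) listed in the
    // accessor table for internal_static_hbase_pb_ScanRequest_descriptor.
    for (Descriptors.FieldDescriptor field : scanRequest.getFields()) {
      System.out.println(field.getNumber() + ": " + field.getName());
    }
  }
}

The accessor tables themselves are internal plumbing: they let the generated message classes resolve getters and setters by reflection from the string names seen above, so ordinary client code only ever goes through the typed builders or, as sketched here, the public descriptor API.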