View Javadoc

1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: WAL.proto
3   
4   package org.apache.hadoop.hbase.protobuf.generated;
5   
6   public final class WALProtos {
7     private WALProtos() {}
8     public static void registerAllExtensions(
9         com.google.protobuf.ExtensionRegistry registry) {
10    }
11    /**
12     * Protobuf enum {@code hbase.pb.ScopeType}
13     */
  /**
   * Protobuf enum {@code hbase.pb.ScopeType}
   *
   * <p>Generated from WAL.proto; do not hand-edit — change the .proto and
   * regenerate. Each constant carries both its descriptor index and its wire
   * value (they happen to coincide here).
   */
  public enum ScopeType
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
     */
    REPLICATION_SCOPE_LOCAL(0, 0),
    /**
     * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
     */
    REPLICATION_SCOPE_GLOBAL(1, 1),
    ;

    /**
     * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
     */
    public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0;
    /**
     * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
     */
    public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1;


    // Wire value of this constant (not the ordinal).
    public final int getNumber() { return value; }

    /**
     * Maps a wire value to its enum constant.
     * NOTE: returns {@code null} (rather than throwing) for unrecognized
     * values — callers must null-check.
     */
    public static ScopeType valueOf(int value) {
      switch (value) {
        case 0: return REPLICATION_SCOPE_LOCAL;
        case 1: return REPLICATION_SCOPE_GLOBAL;
        default: return null;
      }
    }

    // Used by the protobuf runtime to resolve wire values during parsing.
    public static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
            public ScopeType findValueByNumber(int number) {
              return ScopeType.valueOf(number);
            }
          };

    // Descriptor for this specific constant, looked up by declaration index.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // First enum type declared in WAL.proto (index 0 of the file descriptor).
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final ScopeType[] VALUES = values();

    /**
     * Maps an EnumValueDescriptor back to its constant.
     * @throws java.lang.IllegalArgumentException if the descriptor belongs to
     *         a different enum type.
     */
    public static ScopeType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    // index: position in the descriptor's value list; value: wire number.
    private final int index;
    private final int value;

    private ScopeType(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType)
  }
92  
  /**
   * Read-only accessor interface for {@code hbase.pb.WALHeader}, implemented
   * by both the immutable message and its Builder. All five fields are
   * optional; each has a {@code hasX()} presence check paired with its getter.
   */
  public interface WALHeaderOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool has_compression = 1;
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    boolean hasHasCompression();
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    boolean getHasCompression();

    // optional bytes encryption_key = 2;
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    boolean hasEncryptionKey();
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    com.google.protobuf.ByteString getEncryptionKey();

    // optional bool has_tag_compression = 3;
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    boolean hasHasTagCompression();
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    boolean getHasTagCompression();

    // optional string writer_cls_name = 4;
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    boolean hasWriterClsName();
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    java.lang.String getWriterClsName();
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    com.google.protobuf.ByteString
        getWriterClsNameBytes();

    // optional string cell_codec_cls_name = 5;
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    boolean hasCellCodecClsName();
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    java.lang.String getCellCodecClsName();
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    com.google.protobuf.ByteString
        getCellCodecClsNameBytes();
  }
156   /**
157    * Protobuf type {@code hbase.pb.WALHeader}
158    */
159   public static final class WALHeader extends
160       com.google.protobuf.GeneratedMessage
161       implements WALHeaderOrBuilder {
    // Use WALHeader.newBuilder() to construct.
    private WALHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to construct the shared default instance; leaves fields at
    // their initFields() defaults with an empty unknown-field set.
    private WALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance; presumably assigned in a static
    // initializer later in the generated file (not visible in this chunk).
    private static final WALHeader defaultInstance;
    public static WALHeader getDefaultInstance() {
      return defaultInstance;
    }

    public WALHeader getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read off the wire with unrecognized tag numbers; retained so a
    // round-trip reserialization does not drop them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via PARSER.parsePartialFrom.
     * Each case label is a full tag: (field_number << 3) | wire_type, so
     * 8=field 1 varint, 18=field 2 length-delimited, 24=field 3 varint,
     * 34=field 4 length-delimited, 42=field 5 length-delimited. Tag 0 marks
     * end of input. Note the {@code default} arm precedes the numbered cases
     * — legal in Java, case order in a switch is irrelevant.
     */
    private WALHeader(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Declared for repeated-field bookkeeping by the generator; this
      // message has no repeated fields so it is never read.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unknown tag: stash it; stop if it is an end-group marker.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              hasCompression_ = input.readBool();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              encryptionKey_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              hasTagCompression_ = input.readBool();
              break;
            }
            case 34: {
              // String fields are kept as ByteString until first String access.
              bitField0_ |= 0x00000008;
              writerClsName_ = input.readBytes();
              break;
            }
            case 42: {
              bitField0_ |= 0x00000010;
              cellCodecClsName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Message descriptor for hbase.pb.WALHeader, held in the outer class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
    }

    // Reflection support: binds the descriptor's fields to this class's
    // getters/setters for GeneratedMessage's reflective accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class);
    }
255 
    // NOTE(review): public and non-final — a quirk of protobuf 2.x generated
    // code. Treat as read-only; never reassign.
    public static com.google.protobuf.Parser<WALHeader> PARSER =
        new com.google.protobuf.AbstractParser<WALHeader>() {
      public WALHeader parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WALHeader(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WALHeader> getParserForType() {
      return PARSER;
    }
270 
    // Presence bitmask: bit N-1 set means optional field N was explicitly set.
    private int bitField0_;
    // optional bool has_compression = 1;
    public static final int HAS_COMPRESSION_FIELD_NUMBER = 1;
    private boolean hasCompression_;
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public boolean hasHasCompression() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public boolean getHasCompression() {
      return hasCompression_;
    }

    // optional bytes encryption_key = 2;
    public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString encryptionKey_;
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    public boolean hasEncryptionKey() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes encryption_key = 2;</code>
     * Returns ByteString.EMPTY (never null) when unset — see initFields().
     */
    public com.google.protobuf.ByteString getEncryptionKey() {
      return encryptionKey_;
    }

    // optional bool has_tag_compression = 3;
    public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3;
    private boolean hasTagCompression_;
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public boolean hasHasTagCompression() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public boolean getHasTagCompression() {
      return hasTagCompression_;
    }
319 
    // optional string writer_cls_name = 4;
    public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4;
    // Holds either a String or a ByteString: parsed bytes are kept raw and
    // converted lazily on first String access (standard protobuf 2.x scheme).
    private java.lang.Object writerClsName_;
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    public boolean hasWriterClsName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     * Lazily decodes the stored ByteString; caches the String back into the
     * field only when the bytes were valid UTF-8 (safe benign race: the
     * cached value is derived deterministically from immutable bytes).
     */
    public java.lang.String getWriterClsName() {
      java.lang.Object ref = writerClsName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          writerClsName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     * Inverse of the above: encodes and caches a ByteString if a String is
     * currently stored.
     */
    public com.google.protobuf.ByteString
        getWriterClsNameBytes() {
      java.lang.Object ref = writerClsName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        writerClsName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string cell_codec_cls_name = 5;
    public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5;
    // Same lazy String/ByteString dual representation as writerClsName_.
    private java.lang.Object cellCodecClsName_;
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public boolean hasCellCodecClsName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public java.lang.String getCellCodecClsName() {
      java.lang.Object ref = cellCodecClsName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          cellCodecClsName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public com.google.protobuf.ByteString
        getCellCodecClsNameBytes() {
      java.lang.Object ref = cellCodecClsName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        cellCodecClsName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
405 
    // Proto2 defaults for all fields (called before parsing so unset fields
    // read as these values rather than null/garbage).
    private void initFields() {
      hasCompression_ = false;
      encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
      hasTagCompression_ = false;
      writerClsName_ = "";
      cellCodecClsName_ = "";
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // Every field is optional, so this message is always initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
421 
    /**
     * Serializes set fields in field-number order, then any unknown fields.
     * getSerializedSize() is called first to populate the memoized size the
     * runtime relies on during writing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, hasCompression_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, encryptionKey_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, hasTagCompression_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getWriterClsNameBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getCellCodecClsNameBytes());
      }
      getUnknownFields().writeTo(output);
    }
442 
    // Cached wire size; -1 means not yet computed. Safe to memoize because
    // the message is immutable after construction.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and returns the serialized byte size: sum of each set
     * field's encoded size plus the unknown-field set.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, hasCompression_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, encryptionKey_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, hasTagCompression_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getWriterClsNameBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getCellCodecClsNameBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
473 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's serialized
    // proxy so the protobuf wire form is used instead of field reflection.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
480 
    /**
     * Field-by-field equality: presence flags must match, and for every
     * present field the values must match; unknown fields are compared too.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) obj;

      boolean result = true;
      result = result && (hasHasCompression() == other.hasHasCompression());
      if (hasHasCompression()) {
        result = result && (getHasCompression()
            == other.getHasCompression());
      }
      result = result && (hasEncryptionKey() == other.hasEncryptionKey());
      if (hasEncryptionKey()) {
        result = result && getEncryptionKey()
            .equals(other.getEncryptionKey());
      }
      result = result && (hasHasTagCompression() == other.hasHasTagCompression());
      if (hasHasTagCompression()) {
        result = result && (getHasTagCompression()
            == other.getHasTagCompression());
      }
      result = result && (hasWriterClsName() == other.hasWriterClsName());
      if (hasWriterClsName()) {
        result = result && getWriterClsName()
            .equals(other.getWriterClsName());
      }
      result = result && (hasCellCodecClsName() == other.hasCellCodecClsName());
      if (hasCellCodecClsName()) {
        result = result && getCellCodecClsName()
            .equals(other.getCellCodecClsName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
521 
    // Cached hash; 0 means not yet computed (a legitimately-zero hash would
    // just be recomputed — harmless).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): folds in the field number and value of
     * every present field, plus the descriptor and unknown fields.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasHasCompression()) {
        hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getHasCompression());
      }
      if (hasEncryptionKey()) {
        hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
        hash = (53 * hash) + getEncryptionKey().hashCode();
      }
      if (hasHasTagCompression()) {
        hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getHasTagCompression());
      }
      if (hasWriterClsName()) {
        hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getWriterClsName().hashCode();
      }
      if (hasCellCodecClsName()) {
        hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getCellCodecClsName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
554 
    // Standard generated parse entry points; all delegate to PARSER.
    // The InputStream variants throw IOException; the byte[]/ByteString
    // variants throw only InvalidProtocolBufferException.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
607 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Builder pre-populated with a copy of an existing message's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
621     /**
622      * Protobuf type {@code hbase.pb.WALHeader}
623      */
624     public static final class Builder extends
625         com.google.protobuf.GeneratedMessage.Builder<Builder>
626        implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeaderOrBuilder {
      // Same descriptor/accessor table as the message class; the Builder
      // shares the WALHeader field layout.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message/repeated sub-builders to eagerly create for this message,
      // so the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
656 
      // Resets every field to its proto2 default and clears all presence bits.
      public Builder clear() {
        super.clear();
        hasCompression_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        hasTagCompression_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        writerClsName_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        cellCodecClsName_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      // Deep copy via build-and-merge of the current partial state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
675 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance();
      }

      /**
       * Builds the message, verifying required-field initialization.
       * (All WALHeader fields are optional, so isInitialized() is always
       * true and the throw branch is effectively unreachable here.)
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
692 
      /**
       * Copies the builder's fields and presence bits into a new immutable
       * message without initialization checks. Field values are copied
       * unconditionally; only the presence bitmask is gated on the builder's
       * bits, matching standard protoc output.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.hasCompression_ = hasCompression_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.encryptionKey_ = encryptionKey_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.hasTagCompression_ = hasTagCompression_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.writerClsName_ = writerClsName_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.cellCodecClsName_ = cellCodecClsName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
721 
      // Generic merge: dispatches to the typed overload when possible,
      // otherwise falls back to reflective field-by-field merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Copies each field that is present in {@code other} over this
       * builder's value; absent fields are left untouched. String fields are
       * copied in their raw Object form (String or ByteString) to avoid a
       * decode, setting the presence bit directly.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this;
        if (other.hasHasCompression()) {
          setHasCompression(other.getHasCompression());
        }
        if (other.hasEncryptionKey()) {
          setEncryptionKey(other.getEncryptionKey());
        }
        if (other.hasHasTagCompression()) {
          setHasTagCompression(other.getHasTagCompression());
        }
        if (other.hasWriterClsName()) {
          bitField0_ |= 0x00000008;
          writerClsName_ = other.writerClsName_;
          onChanged();
        }
        if (other.hasCellCodecClsName()) {
          bitField0_ |= 0x00000010;
          cellCodecClsName_ = other.cellCodecClsName_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
755 
      // All fields optional — a builder is always initialized.
      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream and merges the result into this builder. On a
       * parse failure the partially-parsed message (if any) is still merged
       * in the finally block before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bitmask, mirroring the message's layout.
      private int bitField0_;

      // optional bool has_compression = 1;
      private boolean hasCompression_ ;
      /**
       * <code>optional bool has_compression = 1;</code>
       */
      public boolean hasHasCompression() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bool has_compression = 1;</code>
       */
      public boolean getHasCompression() {
        return hasCompression_;
      }
      /**
       * <code>optional bool has_compression = 1;</code>
       * Sets the value and marks the field present.
       */
      public Builder setHasCompression(boolean value) {
        bitField0_ |= 0x00000001;
        hasCompression_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool has_compression = 1;</code>
       * Clears the presence bit and restores the default (false).
       */
      public Builder clearHasCompression() {
        bitField0_ = (bitField0_ & ~0x00000001);
        hasCompression_ = false;
        onChanged();
        return this;
      }
811 
      // optional bytes encryption_key = 2;
      private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes encryption_key = 2;</code>
       */
      public boolean hasEncryptionKey() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes encryption_key = 2;</code>
       */
      public com.google.protobuf.ByteString getEncryptionKey() {
        return encryptionKey_;
      }
      /**
       * <code>optional bytes encryption_key = 2;</code>
       * @throws NullPointerException if {@code value} is null.
       * (The misaligned null-check indentation below is protoc's own output.)
       */
      public Builder setEncryptionKey(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        encryptionKey_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes encryption_key = 2;</code>
       * Restores the message-level default (ByteString.EMPTY).
       */
      public Builder clearEncryptionKey() {
        bitField0_ = (bitField0_ & ~0x00000002);
        encryptionKey_ = getDefaultInstance().getEncryptionKey();
        onChanged();
        return this;
      }
847 
      // optional bool has_tag_compression = 3;
      private boolean hasTagCompression_ ;
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public boolean hasHasTagCompression() {
        // Presence bit 0x00000004 (field 3).
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public boolean getHasTagCompression() {
        return hasTagCompression_;
      }
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public Builder setHasTagCompression(boolean value) {
        bitField0_ |= 0x00000004;
        hasTagCompression_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public Builder clearHasTagCompression() {
        // Drop the presence bit and restore the proto2 default (false).
        bitField0_ = (bitField0_ & ~0x00000004);
        hasTagCompression_ = false;
        onChanged();
        return this;
      }
880 
      // optional string writer_cls_name = 4;
      // Stored as Object so the field can lazily hold either a String or the
      // raw ByteString it was parsed from; decoded on first String access.
      private java.lang.Object writerClsName_ = "";
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public boolean hasWriterClsName() {
        // Presence bit 0x00000008 (field 4).
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public java.lang.String getWriterClsName() {
        java.lang.Object ref = writerClsName_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString once and memoize the String form.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          writerClsName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public com.google.protobuf.ByteString
          getWriterClsNameBytes() {
        java.lang.Object ref = writerClsName_;
        if (ref instanceof String) {
          // Encode once and memoize the ByteString form.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          writerClsName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public Builder setWriterClsName(
          java.lang.String value) {
        // Null-hostile; empty string is the way to express "no value".
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        writerClsName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public Builder clearWriterClsName() {
        // Reset to the default instance's value ("").
        bitField0_ = (bitField0_ & ~0x00000008);
        writerClsName_ = getDefaultInstance().getWriterClsName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public Builder setWriterClsNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        writerClsName_ = value;
        onChanged();
        return this;
      }
954 
      // optional string cell_codec_cls_name = 5;
      // Lazily holds either a String or the raw ByteString it was parsed from.
      private java.lang.Object cellCodecClsName_ = "";
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public boolean hasCellCodecClsName() {
        // Presence bit 0x00000010 (field 5).
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public java.lang.String getCellCodecClsName() {
        java.lang.Object ref = cellCodecClsName_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString once and memoize the String form.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          cellCodecClsName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public com.google.protobuf.ByteString
          getCellCodecClsNameBytes() {
        java.lang.Object ref = cellCodecClsName_;
        if (ref instanceof String) {
          // Encode once and memoize the ByteString form.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          cellCodecClsName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public Builder setCellCodecClsName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        cellCodecClsName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public Builder clearCellCodecClsName() {
        // Reset to the default instance's value ("").
        bitField0_ = (bitField0_ & ~0x00000010);
        cellCodecClsName_ = getDefaultInstance().getCellCodecClsName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public Builder setCellCodecClsNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        cellCodecClsName_ = value;
        onChanged();
        return this;
      }
1028 
1029       // @@protoc_insertion_point(builder_scope:hbase.pb.WALHeader)
1030     }
1031 
    static {
      // Build the shared immutable default instance once at class-load time;
      // the no-init constructor skips field setup, done here via initFields().
      defaultInstance = new WALHeader(true);
      defaultInstance.initFields();
    }
1036 
1037     // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader)
1038   }
1039 
  /**
   * Read-only accessor interface for {@code hbase.pb.WALKey}, implemented by
   * both the immutable message and its builder. Generated by protoc; declares
   * a has/get pair per optional/required field and list-style accessors for
   * repeated fields.
   */
  public interface WALKeyOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes encoded_region_name = 1;
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    boolean hasEncodedRegionName();
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    com.google.protobuf.ByteString getEncodedRegionName();

    // required bytes table_name = 2;
    /**
     * <code>required bytes table_name = 2;</code>
     */
    boolean hasTableName();
    /**
     * <code>required bytes table_name = 2;</code>
     */
    com.google.protobuf.ByteString getTableName();

    // required uint64 log_sequence_number = 3;
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    boolean hasLogSequenceNumber();
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    long getLogSequenceNumber();

    // required uint64 write_time = 4;
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    boolean hasWriteTime();
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    long getWriteTime();

    // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
    /**
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     *
     * <pre>
     *
     *This parameter is deprecated in favor of clusters which 
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94) 
     *can be read by the newer releases.
     * </pre>
     */
    @java.lang.Deprecated boolean hasClusterId();
    /**
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     *
     * <pre>
     *
     *This parameter is deprecated in favor of clusters which 
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94) 
     *can be read by the newer releases.
     * </pre>
     */
    @java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId();
    /**
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     *
     * <pre>
     *
     *This parameter is deprecated in favor of clusters which 
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94) 
     *can be read by the newer releases.
     * </pre>
     */
    @java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder();

    // repeated .hbase.pb.FamilyScope scopes = 6;
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> 
        getScopesList();
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index);
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    int getScopesCount();
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
        getScopesOrBuilderList();
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
        int index);

    // optional uint32 following_kv_count = 7;
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    boolean hasFollowingKvCount();
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    int getFollowingKvCount();

    // repeated .hbase.pb.UUID cluster_ids = 8;
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> 
        getClusterIdsList();
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index);
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    int getClusterIdsCount();
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
        getClusterIdsOrBuilderList();
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
        int index);

    // optional uint64 nonceGroup = 9;
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 10;
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    long getNonce();

    // optional uint64 orig_sequence_number = 11;
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    boolean hasOrigSequenceNumber();
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    long getOrigSequenceNumber();
  }
1241   /**
1242    * Protobuf type {@code hbase.pb.WALKey}
1243    *
1244    * <pre>
1245    *
1246    * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
1247    * for some KVs
1248    * </pre>
1249    */
1250   public static final class WALKey extends
1251       com.google.protobuf.GeneratedMessage
1252       implements WALKeyOrBuilder {
    // Use WALKey.newBuilder() to construct.
    private WALKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor for the shared default instance; skips field
    // initialization (the static initializer calls initFields() afterwards).
    private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1259 
    // Singleton with every field at its proto2 default; assigned in the
    // static initializer.
    private static final WALKey defaultInstance;
    public static WALKey getDefaultInstance() {
      return defaultInstance;
    }

    public WALKey getDefaultInstanceForType() {
      return defaultInstance;
    }
1268 
    // Fields seen on the wire that this schema version does not recognize;
    // preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from
     * {@code input} until end-of-stream (tag 0) or an unrecognized field that
     * terminates parsing.
     *
     * <p>Tag values are (field_number << 3) | wire_type, e.g. 10 = field 1
     * length-delimited, 24 = field 3 varint. Repeated fields accumulate into
     * ArrayLists guarded by {@code mutable_bitField0_} (allocation bits for
     * this constructor only), then are frozen to unmodifiable lists in the
     * {@code finally} block; {@code bitField0_} holds the message's presence
     * bits. Note both maps use bit 0x00000020 — for different purposes on
     * different fields.
     */
    private WALKey(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream / group.
              done = true;
              break;
            default: {
              // Unknown field: stash it; stop if it can't be skipped.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              logSequenceNumber_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              writeTime_ = input.readUInt64();
              break;
            }
            case 42: {
              // Singular message field: if already present, merge the new
              // occurrence into the old one (last-wins per proto2 semantics).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = clusterId_.toBuilder();
              }
              clusterId_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(clusterId_);
                clusterId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 50: {
              // Lazily allocate the repeated-field list on first occurrence.
              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>();
                mutable_bitField0_ |= 0x00000020;
              }
              scopes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry));
              break;
            }
            case 56: {
              bitField0_ |= 0x00000020;
              followingKvCount_ = input.readUInt32();
              break;
            }
            case 66: {
              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
                clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>();
                mutable_bitField0_ |= 0x00000080;
              }
              clusterIds_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry));
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000080;
              nonce_ = input.readUInt64();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000100;
              origSequenceNumber_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach this partially-built message so callers can salvage it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze repeated fields and unknown fields even on failure, so the
        // unfinished message is safely immutable.
        if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
          scopes_ = java.util.Collections.unmodifiableList(scopes_);
        }
        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
          clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for {@code hbase.pb.WALKey}. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
    }
1389 
    // Reflection support: binds the descriptor's fields to this class's
    // generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class);
    }
1396 
    /** Stateless parser that delegates to the stream-parsing constructor. */
    public static com.google.protobuf.Parser<WALKey> PARSER =
        new com.google.protobuf.AbstractParser<WALKey>() {
      public WALKey parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WALKey(input, extensionRegistry);
      }
    };
1406 
    @java.lang.Override
    public com.google.protobuf.Parser<WALKey> getParserForType() {
      return PARSER;
    }
1411 
    // Presence bitmap for this message's scalar/message fields.
    private int bitField0_;
    // required bytes encoded_region_name = 1;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString encodedRegionName_;
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    public boolean hasEncodedRegionName() {
      // Presence bit 0x00000001 (field 1).
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }
1428 
    // required bytes table_name = 2;
    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString tableName_;
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public boolean hasTableName() {
      // Presence bit 0x00000002 (field 2).
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }
1444 
    // required uint64 log_sequence_number = 3;
    public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3;
    private long logSequenceNumber_;
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    public boolean hasLogSequenceNumber() {
      // Presence bit 0x00000004 (field 3).
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    public long getLogSequenceNumber() {
      return logSequenceNumber_;
    }
1460 
    // required uint64 write_time = 4;
    public static final int WRITE_TIME_FIELD_NUMBER = 4;
    private long writeTime_;
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    public boolean hasWriteTime() {
      // Presence bit 0x00000008 (field 4).
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    public long getWriteTime() {
      return writeTime_;
    }
1476 
    // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
    public static final int CLUSTER_ID_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_;
    /**
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     *
     * <pre>
     *
     *This parameter is deprecated in favor of clusters which 
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94) 
     *can be read by the newer releases.
     * </pre>
     */
    @java.lang.Deprecated public boolean hasClusterId() {
      // Presence bit 0x00000010 (field 5).
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     *
     * <pre>
     *
     *This parameter is deprecated in favor of clusters which 
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94) 
     *can be read by the newer releases.
     * </pre>
     */
    @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() {
      return clusterId_;
    }
    /**
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     *
     * <pre>
     *
     *This parameter is deprecated in favor of clusters which 
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94) 
     *can be read by the newer releases.
     * </pre>
     */
    @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
      // Immutable message: the field itself serves as its OrBuilder view.
      return clusterId_;
    }
1522 
    // repeated .hbase.pb.FamilyScope scopes = 6;
    public static final int SCOPES_FIELD_NUMBER = 6;
    // Unmodifiable after parsing (or emptyList() when absent).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_;
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
      return scopes_;
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
        getScopesOrBuilderList() {
      return scopes_;
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public int getScopesCount() {
      return scopes_.size();
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
      return scopes_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
        int index) {
      return scopes_.get(index);
    }
1558 
    // optional uint32 following_kv_count = 7;
    public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7;
    private int followingKvCount_;
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    public boolean hasFollowingKvCount() {
      // Presence bit 0x00000020 (field 7; repeated field 6 needs no bit).
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    public int getFollowingKvCount() {
      return followingKvCount_;
    }
1574 
    // repeated .hbase.pb.UUID cluster_ids = 8;
    public static final int CLUSTER_IDS_FIELD_NUMBER = 8;
    // Unmodifiable after parsing (or emptyList() when absent).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> clusterIds_;
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
      return clusterIds_;
    }
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
        getClusterIdsOrBuilderList() {
      return clusterIds_;
    }
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    public int getClusterIdsCount() {
      return clusterIds_.size();
    }
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
      return clusterIds_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     *
     * <pre>
     *
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
        int index) {
      return clusterIds_.get(index);
    }
1640 
    // optional uint64 nonceGroup = 9;
    public static final int NONCEGROUP_FIELD_NUMBER = 9;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    public boolean hasNonceGroup() {
      // Presence bit 0x00000040 (field 9).
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }
1656 
    // optional uint64 nonce = 10;
    public static final int NONCE_FIELD_NUMBER = 10;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    public boolean hasNonce() {
      // Presence bit 0x00000080 (field 10).
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    public long getNonce() {
      return nonce_;
    }
1672 
    // optional uint64 orig_sequence_number = 11;
    public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11;
    private long origSequenceNumber_;
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    public boolean hasOrigSequenceNumber() {
      // Presence bit 0x00000100 (field 11).
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    public long getOrigSequenceNumber() {
      return origSequenceNumber_;
    }
1688 
         // Resets every field to its protobuf default value (empty bytes, 0,
         // default-instance message, empty lists). Called during message construction.
1689     private void initFields() {
1690       encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
1691       tableName_ = com.google.protobuf.ByteString.EMPTY;
1692       logSequenceNumber_ = 0L;
1693       writeTime_ = 0L;
1694       clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
1695       scopes_ = java.util.Collections.emptyList();
1696       followingKvCount_ = 0;
1697       clusterIds_ = java.util.Collections.emptyList();
1698       nonceGroup_ = 0L;
1699       nonce_ = 0L;
1700       origSequenceNumber_ = 0L;
1701     }
         // Memoized initialization state: -1 = not yet checked, 0 = missing required
         // data, 1 = fully initialized.
1702     private byte memoizedIsInitialized = -1;
         // True only when all four required fields (encoded_region_name, table_name,
         // log_sequence_number, write_time) are present and every nested message
         // (cluster_id if set, each scope, each cluster_id entry) is itself initialized.
1703     public final boolean isInitialized() {
1704       byte isInitialized = memoizedIsInitialized;
1705       if (isInitialized != -1) return isInitialized == 1;
1706 
1707       if (!hasEncodedRegionName()) {
1708         memoizedIsInitialized = 0;
1709         return false;
1710       }
1711       if (!hasTableName()) {
1712         memoizedIsInitialized = 0;
1713         return false;
1714       }
1715       if (!hasLogSequenceNumber()) {
1716         memoizedIsInitialized = 0;
1717         return false;
1718       }
1719       if (!hasWriteTime()) {
1720         memoizedIsInitialized = 0;
1721         return false;
1722       }
1723       if (hasClusterId()) {
1724         if (!getClusterId().isInitialized()) {
1725           memoizedIsInitialized = 0;
1726           return false;
1727         }
1728       }
1729       for (int i = 0; i < getScopesCount(); i++) {
1730         if (!getScopes(i).isInitialized()) {
1731           memoizedIsInitialized = 0;
1732           return false;
1733         }
1734       }
1735       for (int i = 0; i < getClusterIdsCount(); i++) {
1736         if (!getClusterIds(i).isInitialized()) {
1737           memoizedIsInitialized = 0;
1738           return false;
1739         }
1740       }
1741       memoizedIsInitialized = 1;
1742       return true;
1743     }
1744 
         // Serializes all set fields to the wire in ascending tag order (1..11),
         // then appends any unknown fields. getSerializedSize() is invoked first so
         // nested-message sizes are memoized before they are written.
1745     public void writeTo(com.google.protobuf.CodedOutputStream output)
1746                         throws java.io.IOException {
1747       getSerializedSize();
1748       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1749         output.writeBytes(1, encodedRegionName_);
1750       }
1751       if (((bitField0_ & 0x00000002) == 0x00000002)) {
1752         output.writeBytes(2, tableName_);
1753       }
1754       if (((bitField0_ & 0x00000004) == 0x00000004)) {
1755         output.writeUInt64(3, logSequenceNumber_);
1756       }
1757       if (((bitField0_ & 0x00000008) == 0x00000008)) {
1758         output.writeUInt64(4, writeTime_);
1759       }
1760       if (((bitField0_ & 0x00000010) == 0x00000010)) {
1761         output.writeMessage(5, clusterId_);
1762       }
1763       for (int i = 0; i < scopes_.size(); i++) {
1764         output.writeMessage(6, scopes_.get(i));
1765       }
1766       if (((bitField0_ & 0x00000020) == 0x00000020)) {
1767         output.writeUInt32(7, followingKvCount_);
1768       }
1769       for (int i = 0; i < clusterIds_.size(); i++) {
1770         output.writeMessage(8, clusterIds_.get(i));
1771       }
1772       if (((bitField0_ & 0x00000040) == 0x00000040)) {
1773         output.writeUInt64(9, nonceGroup_);
1774       }
1775       if (((bitField0_ & 0x00000080) == 0x00000080)) {
1776         output.writeUInt64(10, nonce_);
1777       }
1778       if (((bitField0_ & 0x00000100) == 0x00000100)) {
1779         output.writeUInt64(11, origSequenceNumber_);
1780       }
1781       getUnknownFields().writeTo(output);
1782     }
1783 
         // Memoized total serialized size in bytes; -1 = not yet computed.
1784     private int memoizedSerializedSize = -1;
         // Computes (once) the exact wire size by summing the encoded size of every
         // set field, in the same field order used by writeTo, plus unknown fields.
1785     public int getSerializedSize() {
1786       int size = memoizedSerializedSize;
1787       if (size != -1) return size;
1788 
1789       size = 0;
1790       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1791         size += com.google.protobuf.CodedOutputStream
1792           .computeBytesSize(1, encodedRegionName_);
1793       }
1794       if (((bitField0_ & 0x00000002) == 0x00000002)) {
1795         size += com.google.protobuf.CodedOutputStream
1796           .computeBytesSize(2, tableName_);
1797       }
1798       if (((bitField0_ & 0x00000004) == 0x00000004)) {
1799         size += com.google.protobuf.CodedOutputStream
1800           .computeUInt64Size(3, logSequenceNumber_);
1801       }
1802       if (((bitField0_ & 0x00000008) == 0x00000008)) {
1803         size += com.google.protobuf.CodedOutputStream
1804           .computeUInt64Size(4, writeTime_);
1805       }
1806       if (((bitField0_ & 0x00000010) == 0x00000010)) {
1807         size += com.google.protobuf.CodedOutputStream
1808           .computeMessageSize(5, clusterId_);
1809       }
1810       for (int i = 0; i < scopes_.size(); i++) {
1811         size += com.google.protobuf.CodedOutputStream
1812           .computeMessageSize(6, scopes_.get(i));
1813       }
1814       if (((bitField0_ & 0x00000020) == 0x00000020)) {
1815         size += com.google.protobuf.CodedOutputStream
1816           .computeUInt32Size(7, followingKvCount_);
1817       }
1818       for (int i = 0; i < clusterIds_.size(); i++) {
1819         size += com.google.protobuf.CodedOutputStream
1820           .computeMessageSize(8, clusterIds_.get(i));
1821       }
1822       if (((bitField0_ & 0x00000040) == 0x00000040)) {
1823         size += com.google.protobuf.CodedOutputStream
1824           .computeUInt64Size(9, nonceGroup_);
1825       }
1826       if (((bitField0_ & 0x00000080) == 0x00000080)) {
1827         size += com.google.protobuf.CodedOutputStream
1828           .computeUInt64Size(10, nonce_);
1829       }
1830       if (((bitField0_ & 0x00000100) == 0x00000100)) {
1831         size += com.google.protobuf.CodedOutputStream
1832           .computeUInt64Size(11, origSequenceNumber_);
1833       }
1834       size += getUnknownFields().getSerializedSize();
1835       memoizedSerializedSize = size;
1836       return size;
1837     }
1838 
1839     private static final long serialVersionUID = 0L;
         // Java serialization hook: delegates to the superclass so the message is
         // replaced by the generated-message serialization proxy.
1840     @java.lang.Override
1841     protected java.lang.Object writeReplace()
1842         throws java.io.ObjectStreamException {
1843       return super.writeReplace();
1844     }
1845 
         // Field-by-field equality: for each optional/required field the presence
         // flags must match and, when present, the values must be equal; repeated
         // fields compare as lists; unknown fields must also match.
1846     @java.lang.Override
1847     public boolean equals(final java.lang.Object obj) {
1848       if (obj == this) {
1849        return true;
1850       }
1851       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)) {
1852         return super.equals(obj);
1853       }
1854       org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) obj;
1855 
1856       boolean result = true;
1857       result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
1858       if (hasEncodedRegionName()) {
1859         result = result && getEncodedRegionName()
1860             .equals(other.getEncodedRegionName());
1861       }
1862       result = result && (hasTableName() == other.hasTableName());
1863       if (hasTableName()) {
1864         result = result && getTableName()
1865             .equals(other.getTableName());
1866       }
1867       result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
1868       if (hasLogSequenceNumber()) {
1869         result = result && (getLogSequenceNumber()
1870             == other.getLogSequenceNumber());
1871       }
1872       result = result && (hasWriteTime() == other.hasWriteTime());
1873       if (hasWriteTime()) {
1874         result = result && (getWriteTime()
1875             == other.getWriteTime());
1876       }
1877       result = result && (hasClusterId() == other.hasClusterId());
1878       if (hasClusterId()) {
1879         result = result && getClusterId()
1880             .equals(other.getClusterId());
1881       }
1882       result = result && getScopesList()
1883           .equals(other.getScopesList());
1884       result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
1885       if (hasFollowingKvCount()) {
1886         result = result && (getFollowingKvCount()
1887             == other.getFollowingKvCount());
1888       }
1889       result = result && getClusterIdsList()
1890           .equals(other.getClusterIdsList());
1891       result = result && (hasNonceGroup() == other.hasNonceGroup());
1892       if (hasNonceGroup()) {
1893         result = result && (getNonceGroup()
1894             == other.getNonceGroup());
1895       }
1896       result = result && (hasNonce() == other.hasNonce());
1897       if (hasNonce()) {
1898         result = result && (getNonce()
1899             == other.getNonce());
1900       }
1901       result = result && (hasOrigSequenceNumber() == other.hasOrigSequenceNumber());
1902       if (hasOrigSequenceNumber()) {
1903         result = result && (getOrigSequenceNumber()
1904             == other.getOrigSequenceNumber());
1905       }
1906       result = result &&
1907           getUnknownFields().equals(other.getUnknownFields());
1908       return result;
1909     }
1910 
         // Cached hash value; 0 means "not yet computed" (a legitimately-zero hash
         // would simply be recomputed each call).
1911     private int memoizedHashCode = 0;
         // Mixes each set field's tag number and value hash into the result so
         // messages equal per equals() hash identically; repeated fields contribute
         // only when non-empty.
1912     @java.lang.Override
1913     public int hashCode() {
1914       if (memoizedHashCode != 0) {
1915         return memoizedHashCode;
1916       }
1917       int hash = 41;
1918       hash = (19 * hash) + getDescriptorForType().hashCode();
1919       if (hasEncodedRegionName()) {
1920         hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
1921         hash = (53 * hash) + getEncodedRegionName().hashCode();
1922       }
1923       if (hasTableName()) {
1924         hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
1925         hash = (53 * hash) + getTableName().hashCode();
1926       }
1927       if (hasLogSequenceNumber()) {
1928         hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
1929         hash = (53 * hash) + hashLong(getLogSequenceNumber());
1930       }
1931       if (hasWriteTime()) {
1932         hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
1933         hash = (53 * hash) + hashLong(getWriteTime());
1934       }
1935       if (hasClusterId()) {
1936         hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
1937         hash = (53 * hash) + getClusterId().hashCode();
1938       }
1939       if (getScopesCount() > 0) {
1940         hash = (37 * hash) + SCOPES_FIELD_NUMBER;
1941         hash = (53 * hash) + getScopesList().hashCode();
1942       }
1943       if (hasFollowingKvCount()) {
1944         hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
1945         hash = (53 * hash) + getFollowingKvCount();
1946       }
1947       if (getClusterIdsCount() > 0) {
1948         hash = (37 * hash) + CLUSTER_IDS_FIELD_NUMBER;
1949         hash = (53 * hash) + getClusterIdsList().hashCode();
1950       }
1951       if (hasNonceGroup()) {
1952         hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
1953         hash = (53 * hash) + hashLong(getNonceGroup());
1954       }
1955       if (hasNonce()) {
1956         hash = (37 * hash) + NONCE_FIELD_NUMBER;
1957         hash = (53 * hash) + hashLong(getNonce());
1958       }
1959       if (hasOrigSequenceNumber()) {
1960         hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER;
1961         hash = (53 * hash) + hashLong(getOrigSequenceNumber());
1962       }
1963       hash = (29 * hash) + getUnknownFields().hashCode();
1964       memoizedHashCode = hash;
1965       return hash;
1966     }
1967 
         // Static parse entry points for every supported input source (ByteString,
         // byte[], InputStream, CodedInputStream, delimited stream), with and without
         // an extension registry. All delegate to the message's PARSER instance.
1968     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
1969         com.google.protobuf.ByteString data)
1970         throws com.google.protobuf.InvalidProtocolBufferException {
1971       return PARSER.parseFrom(data);
1972     }
1973     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
1974         com.google.protobuf.ByteString data,
1975         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1976         throws com.google.protobuf.InvalidProtocolBufferException {
1977       return PARSER.parseFrom(data, extensionRegistry);
1978     }
1979     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data)
1980         throws com.google.protobuf.InvalidProtocolBufferException {
1981       return PARSER.parseFrom(data);
1982     }
1983     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
1984         byte[] data,
1985         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1986         throws com.google.protobuf.InvalidProtocolBufferException {
1987       return PARSER.parseFrom(data, extensionRegistry);
1988     }
1989     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input)
1990         throws java.io.IOException {
1991       return PARSER.parseFrom(input);
1992     }
1993     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
1994         java.io.InputStream input,
1995         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1996         throws java.io.IOException {
1997       return PARSER.parseFrom(input, extensionRegistry);
1998     }
1999     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input)
2000         throws java.io.IOException {
2001       return PARSER.parseDelimitedFrom(input);
2002     }
2003     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(
2004         java.io.InputStream input,
2005         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2006         throws java.io.IOException {
2007       return PARSER.parseDelimitedFrom(input, extensionRegistry);
2008     }
2009     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
2010         com.google.protobuf.CodedInputStream input)
2011         throws java.io.IOException {
2012       return PARSER.parseFrom(input);
2013     }
2014     public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(
2015         com.google.protobuf.CodedInputStream input,
2016         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2017         throws java.io.IOException {
2018       return PARSER.parseFrom(input, extensionRegistry);
2019     }
2020 
         // Builder factory methods: fresh builder, builder pre-populated from a
         // prototype message, and a builder seeded from this instance (toBuilder).
2021     public static Builder newBuilder() { return Builder.create(); }
2022     public Builder newBuilderForType() { return newBuilder(); }
2023     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey prototype) {
2024       return newBuilder().mergeFrom(prototype);
2025     }
2026     public Builder toBuilder() { return newBuilder(this); }
2027 
         // Creates a builder attached to a parent so nested-builder changes
         // propagate change notifications upward.
2028     @java.lang.Override
2029     protected Builder newBuilderForType(
2030         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2031       Builder builder = new Builder(parent);
2032       return builder;
2033     }
2034     /**
2035      * Protobuf type {@code hbase.pb.WALKey}
2036      *
2037      * <pre>
2038      *
2039      * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
2040      * for some KVs
2041      * </pre>
2042      */
2043     public static final class Builder extends
2044         com.google.protobuf.GeneratedMessage.Builder<Builder>
2045        implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder {
           // Descriptor and reflection-accessor plumbing shared with the outer
           // WALKey message type.
2046       public static final com.google.protobuf.Descriptors.Descriptor
2047           getDescriptor() {
2048         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
2049       }
2050 
2051       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2052           internalGetFieldAccessorTable() {
2053         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
2054             .ensureFieldAccessorsInitialized(
2055                 org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class);
2056       }
2057 
2058       // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder()
2059       private Builder() {
2060         maybeForceBuilderInitialization();
2061       }
2062 
2063       private Builder(
2064           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2065         super(parent);
2066         maybeForceBuilderInitialization();
2067       }
           // Eagerly creates the nested-field builders when the runtime is configured
           // to always use field builders (alwaysUseFieldBuilders).
2068       private void maybeForceBuilderInitialization() {
2069         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2070           getClusterIdFieldBuilder();
2071           getScopesFieldBuilder();
2072           getClusterIdsFieldBuilder();
2073         }
2074       }
2075       private static Builder create() {
2076         return new Builder();
2077       }
2078 
           // Resets every field to its default and clears all presence bits. Note the
           // Builder's bit layout differs from the message's: repeated fields
           // (scopes 0x20, clusterIds 0x80) occupy builder bits, shifting the bits of
           // the later singular fields.
2079       public Builder clear() {
2080         super.clear();
2081         encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
2082         bitField0_ = (bitField0_ & ~0x00000001);
2083         tableName_ = com.google.protobuf.ByteString.EMPTY;
2084         bitField0_ = (bitField0_ & ~0x00000002);
2085         logSequenceNumber_ = 0L;
2086         bitField0_ = (bitField0_ & ~0x00000004);
2087         writeTime_ = 0L;
2088         bitField0_ = (bitField0_ & ~0x00000008);
2089         if (clusterIdBuilder_ == null) {
2090           clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
2091         } else {
2092           clusterIdBuilder_.clear();
2093         }
2094         bitField0_ = (bitField0_ & ~0x00000010);
2095         if (scopesBuilder_ == null) {
2096           scopes_ = java.util.Collections.emptyList();
2097           bitField0_ = (bitField0_ & ~0x00000020);
2098         } else {
2099           scopesBuilder_.clear();
2100         }
2101         followingKvCount_ = 0;
2102         bitField0_ = (bitField0_ & ~0x00000040);
2103         if (clusterIdsBuilder_ == null) {
2104           clusterIds_ = java.util.Collections.emptyList();
2105           bitField0_ = (bitField0_ & ~0x00000080);
2106         } else {
2107           clusterIdsBuilder_.clear();
2108         }
2109         nonceGroup_ = 0L;
2110         bitField0_ = (bitField0_ & ~0x00000100);
2111         nonce_ = 0L;
2112         bitField0_ = (bitField0_ & ~0x00000200);
2113         origSequenceNumber_ = 0L;
2114         bitField0_ = (bitField0_ & ~0x00000400);
2115         return this;
2116       }
2117 
           // Deep-copies this builder by building a partial message and merging it
           // into a fresh builder.
2118       public Builder clone() {
2119         return create().mergeFrom(buildPartial());
2120       }
2121 
2122       public com.google.protobuf.Descriptors.Descriptor
2123           getDescriptorForType() {
2124         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
2125       }
2126 
2127       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() {
2128         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
2129       }
2130 
           // Builds the message, throwing UninitializedMessageException (via
           // newUninitializedMessageException) when required fields are missing.
2131       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey build() {
2132         org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial();
2133         if (!result.isInitialized()) {
2134           throw newUninitializedMessageException(result);
2135         }
2136         return result;
2137       }
2138 
           // Builds the message without checking required fields. Builder presence
           // bits are remapped to the message's tighter layout (repeated fields use
           // builder bits but none in the message): 0x40 -> 0x20 (followingKvCount),
           // 0x100 -> 0x40 (nonceGroup), 0x200 -> 0x80 (nonce),
           // 0x400 -> 0x100 (origSequenceNumber). Repeated lists are frozen with
           // Collections.unmodifiableList when no field builder is in use.
2139       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
2140         org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
2141         int from_bitField0_ = bitField0_;
2142         int to_bitField0_ = 0;
2143         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2144           to_bitField0_ |= 0x00000001;
2145         }
2146         result.encodedRegionName_ = encodedRegionName_;
2147         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
2148           to_bitField0_ |= 0x00000002;
2149         }
2150         result.tableName_ = tableName_;
2151         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
2152           to_bitField0_ |= 0x00000004;
2153         }
2154         result.logSequenceNumber_ = logSequenceNumber_;
2155         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
2156           to_bitField0_ |= 0x00000008;
2157         }
2158         result.writeTime_ = writeTime_;
2159         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
2160           to_bitField0_ |= 0x00000010;
2161         }
2162         if (clusterIdBuilder_ == null) {
2163           result.clusterId_ = clusterId_;
2164         } else {
2165           result.clusterId_ = clusterIdBuilder_.build();
2166         }
2167         if (scopesBuilder_ == null) {
2168           if (((bitField0_ & 0x00000020) == 0x00000020)) {
2169             scopes_ = java.util.Collections.unmodifiableList(scopes_);
2170             bitField0_ = (bitField0_ & ~0x00000020);
2171           }
2172           result.scopes_ = scopes_;
2173         } else {
2174           result.scopes_ = scopesBuilder_.build();
2175         }
2176         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
2177           to_bitField0_ |= 0x00000020;
2178         }
2179         result.followingKvCount_ = followingKvCount_;
2180         if (clusterIdsBuilder_ == null) {
2181           if (((bitField0_ & 0x00000080) == 0x00000080)) {
2182             clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
2183             bitField0_ = (bitField0_ & ~0x00000080);
2184           }
2185           result.clusterIds_ = clusterIds_;
2186         } else {
2187           result.clusterIds_ = clusterIdsBuilder_.build();
2188         }
2189         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
2190           to_bitField0_ |= 0x00000040;
2191         }
2192         result.nonceGroup_ = nonceGroup_;
2193         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
2194           to_bitField0_ |= 0x00000080;
2195         }
2196         result.nonce_ = nonce_;
2197         if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
2198           to_bitField0_ |= 0x00000100;
2199         }
2200         result.origSequenceNumber_ = origSequenceNumber_;
2201         result.bitField0_ = to_bitField0_;
2202         onBuilt();
2203         return result;
2204       }
2205 
           // Generic merge: dispatches to the typed overload for WALKey messages,
           // otherwise falls back to reflection-based merging in the superclass.
2206       public Builder mergeFrom(com.google.protobuf.Message other) {
2207         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) {
2208           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)other);
2209         } else {
2210           super.mergeFrom(other);
2211           return this;
2212         }
2213       }
2214 
           // Typed merge: copies each set singular field from `other` and appends its
           // repeated fields. For repeated fields, if this builder's list is empty the
           // other message's (immutable) list is adopted by reference; otherwise
           // elements are added to a mutable copy. When a field builder exists and is
           // empty, it is disposed so the shared list can be adopted instead.
2215       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
2216         if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
2217         if (other.hasEncodedRegionName()) {
2218           setEncodedRegionName(other.getEncodedRegionName());
2219         }
2220         if (other.hasTableName()) {
2221           setTableName(other.getTableName());
2222         }
2223         if (other.hasLogSequenceNumber()) {
2224           setLogSequenceNumber(other.getLogSequenceNumber());
2225         }
2226         if (other.hasWriteTime()) {
2227           setWriteTime(other.getWriteTime());
2228         }
2229         if (other.hasClusterId()) {
2230           mergeClusterId(other.getClusterId());
2231         }
2232         if (scopesBuilder_ == null) {
2233           if (!other.scopes_.isEmpty()) {
2234             if (scopes_.isEmpty()) {
2235               scopes_ = other.scopes_;
2236               bitField0_ = (bitField0_ & ~0x00000020);
2237             } else {
2238               ensureScopesIsMutable();
2239               scopes_.addAll(other.scopes_);
2240             }
2241             onChanged();
2242           }
2243         } else {
2244           if (!other.scopes_.isEmpty()) {
2245             if (scopesBuilder_.isEmpty()) {
2246               scopesBuilder_.dispose();
2247               scopesBuilder_ = null;
2248               scopes_ = other.scopes_;
2249               bitField0_ = (bitField0_ & ~0x00000020);
2250               scopesBuilder_ = 
2251                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
2252                    getScopesFieldBuilder() : null;
2253             } else {
2254               scopesBuilder_.addAllMessages(other.scopes_);
2255             }
2256           }
2257         }
2258         if (other.hasFollowingKvCount()) {
2259           setFollowingKvCount(other.getFollowingKvCount());
2260         }
2261         if (clusterIdsBuilder_ == null) {
2262           if (!other.clusterIds_.isEmpty()) {
2263             if (clusterIds_.isEmpty()) {
2264               clusterIds_ = other.clusterIds_;
2265               bitField0_ = (bitField0_ & ~0x00000080);
2266             } else {
2267               ensureClusterIdsIsMutable();
2268               clusterIds_.addAll(other.clusterIds_);
2269             }
2270             onChanged();
2271           }
2272         } else {
2273           if (!other.clusterIds_.isEmpty()) {
2274             if (clusterIdsBuilder_.isEmpty()) {
2275               clusterIdsBuilder_.dispose();
2276               clusterIdsBuilder_ = null;
2277               clusterIds_ = other.clusterIds_;
2278               bitField0_ = (bitField0_ & ~0x00000080);
2279               clusterIdsBuilder_ = 
2280                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
2281                    getClusterIdsFieldBuilder() : null;
2282             } else {
2283               clusterIdsBuilder_.addAllMessages(other.clusterIds_);
2284             }
2285           }
2286         }
2287         if (other.hasNonceGroup()) {
2288           setNonceGroup(other.getNonceGroup());
2289         }
2290         if (other.hasNonce()) {
2291           setNonce(other.getNonce());
2292         }
2293         if (other.hasOrigSequenceNumber()) {
2294           setOrigSequenceNumber(other.getOrigSequenceNumber());
2295         }
2296         this.mergeUnknownFields(other.getUnknownFields());
2297         return this;
2298       }
2299 
           // Mirrors WALKey.isInitialized() but without memoization: all required
           // fields must be set and every nested message must be initialized.
2300       public final boolean isInitialized() {
2301         if (!hasEncodedRegionName()) {
2302           
2303           return false;
2304         }
2305         if (!hasTableName()) {
2306           
2307           return false;
2308         }
2309         if (!hasLogSequenceNumber()) {
2310           
2311           return false;
2312         }
2313         if (!hasWriteTime()) {
2314           
2315           return false;
2316         }
2317         if (hasClusterId()) {
2318           if (!getClusterId().isInitialized()) {
2319             
2320             return false;
2321           }
2322         }
2323         for (int i = 0; i < getScopesCount(); i++) {
2324           if (!getScopes(i).isInitialized()) {
2325             
2326             return false;
2327           }
2328         }
2329         for (int i = 0; i < getClusterIdsCount(); i++) {
2330           if (!getClusterIds(i).isInitialized()) {
2331             
2332             return false;
2333           }
2334         }
2335         return true;
2336       }
2337 
           // Parses a WALKey from the stream and merges it in. On a parse failure the
           // partially-read message (from e.getUnfinishedMessage()) is still merged in
           // the finally block before the exception is rethrown.
2338       public Builder mergeFrom(
2339           com.google.protobuf.CodedInputStream input,
2340           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2341           throws java.io.IOException {
2342         org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parsedMessage = null;
2343         try {
2344           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2345         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2346           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage();
2347           throw e;
2348         } finally {
2349           if (parsedMessage != null) {
2350             mergeFrom(parsedMessage);
2351           }
2352         }
2353         return this;
2354       }
           // Builder-side presence bits; layout differs from the message's (see clear()).
2355       private int bitField0_;
2356 
           // Field 1 (required bytes encoded_region_name); builder presence bit 0x01.
2357       // required bytes encoded_region_name = 1;
2358       private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
2359       /**
2360        * <code>required bytes encoded_region_name = 1;</code>
2361        */
2362       public boolean hasEncodedRegionName() {
2363         return ((bitField0_ & 0x00000001) == 0x00000001);
2364       }
2365       /**
2366        * <code>required bytes encoded_region_name = 1;</code>
2367        */
2368       public com.google.protobuf.ByteString getEncodedRegionName() {
2369         return encodedRegionName_;
2370       }
2371       /**
2372        * <code>required bytes encoded_region_name = 1;</code>
2373        */
2374       public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
2375         if (value == null) {
2376     throw new NullPointerException();
2377   }
2378   bitField0_ |= 0x00000001;
2379         encodedRegionName_ = value;
2380         onChanged();
2381         return this;
2382       }
2383       /**
2384        * <code>required bytes encoded_region_name = 1;</code>
2385        */
2386       public Builder clearEncodedRegionName() {
2387         bitField0_ = (bitField0_ & ~0x00000001);
2388         encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
2389         onChanged();
2390         return this;
2391       }
2392 
           // Field 2 (required bytes table_name); builder presence bit 0x02.
2393       // required bytes table_name = 2;
2394       private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
2395       /**
2396        * <code>required bytes table_name = 2;</code>
2397        */
2398       public boolean hasTableName() {
2399         return ((bitField0_ & 0x00000002) == 0x00000002);
2400       }
2401       /**
2402        * <code>required bytes table_name = 2;</code>
2403        */
2404       public com.google.protobuf.ByteString getTableName() {
2405         return tableName_;
2406       }
2407       /**
2408        * <code>required bytes table_name = 2;</code>
2409        */
2410       public Builder setTableName(com.google.protobuf.ByteString value) {
2411         if (value == null) {
2412     throw new NullPointerException();
2413   }
2414   bitField0_ |= 0x00000002;
2415         tableName_ = value;
2416         onChanged();
2417         return this;
2418       }
2419       /**
2420        * <code>required bytes table_name = 2;</code>
2421        */
2422       public Builder clearTableName() {
2423         bitField0_ = (bitField0_ & ~0x00000002);
2424         tableName_ = getDefaultInstance().getTableName();
2425         onChanged();
2426         return this;
2427       }
2428 
           // Field 3 (required uint64 log_sequence_number); builder presence bit 0x04.
2429       // required uint64 log_sequence_number = 3;
2430       private long logSequenceNumber_ ;
2431       /**
2432        * <code>required uint64 log_sequence_number = 3;</code>
2433        */
2434       public boolean hasLogSequenceNumber() {
2435         return ((bitField0_ & 0x00000004) == 0x00000004);
2436       }
2437       /**
2438        * <code>required uint64 log_sequence_number = 3;</code>
2439        */
2440       public long getLogSequenceNumber() {
2441         return logSequenceNumber_;
2442       }
2443       /**
2444        * <code>required uint64 log_sequence_number = 3;</code>
2445        */
2446       public Builder setLogSequenceNumber(long value) {
2447         bitField0_ |= 0x00000004;
2448         logSequenceNumber_ = value;
2449         onChanged();
2450         return this;
2451       }
2452       /**
2453        * <code>required uint64 log_sequence_number = 3;</code>
2454        */
2455       public Builder clearLogSequenceNumber() {
2456         bitField0_ = (bitField0_ & ~0x00000004);
2457         logSequenceNumber_ = 0L;
2458         onChanged();
2459         return this;
2460       }
2461 
           // Field 4 (required uint64 write_time); builder presence bit 0x08.
2462       // required uint64 write_time = 4;
2463       private long writeTime_ ;
2464       /**
2465        * <code>required uint64 write_time = 4;</code>
2466        */
2467       public boolean hasWriteTime() {
2468         return ((bitField0_ & 0x00000008) == 0x00000008);
2469       }
2470       /**
2471        * <code>required uint64 write_time = 4;</code>
2472        */
2473       public long getWriteTime() {
2474         return writeTime_;
2475       }
2476       /**
2477        * <code>required uint64 write_time = 4;</code>
2478        */
2479       public Builder setWriteTime(long value) {
2480         bitField0_ |= 0x00000008;
2481         writeTime_ = value;
2482         onChanged();
2483         return this;
2484       }
2485       /**
2486        * <code>required uint64 write_time = 4;</code>
2487        */
2488       public Builder clearWriteTime() {
2489         bitField0_ = (bitField0_ & ~0x00000008);
2490         writeTime_ = 0L;
2491         onChanged();
2492         return this;
2493       }
2494 
           // Field 5 (optional .hbase.pb.UUID cluster_id, deprecated). Plain field is
           // used until a SingleFieldBuilder is created; then the builder takes over.
2495       // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
2496       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
2497       private com.google.protobuf.SingleFieldBuilder<
2498           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_;
2499       /**
2500        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2501        *
2502        * <pre>
2503        *
2504        *This parameter is deprecated in favor of clusters which 
2505        *contains the list of clusters that have consumed the change.
2506        *It is retained so that the log created by earlier releases (0.94) 
2507        *can be read by the newer releases.
2508        * </pre>
2509        */
2510       @java.lang.Deprecated public boolean hasClusterId() {
2511         return ((bitField0_ & 0x00000010) == 0x00000010);
2512       }
2513       /**
2514        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2515        *
2516        * <pre>
2517        *
2518        *This parameter is deprecated in favor of clusters which 
2519        *contains the list of clusters that have consumed the change.
2520        *It is retained so that the log created by earlier releases (0.94) 
2521        *can be read by the newer releases.
2522        * </pre>
2523        */
2524       @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() {
2525         if (clusterIdBuilder_ == null) {
2526           return clusterId_;
2527         } else {
2528           return clusterIdBuilder_.getMessage();
2529         }
2530       }
2531       /**
2532        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2533        *
2534        * <pre>
2535        *
2536        *This parameter is deprecated in favor of clusters which 
2537        *contains the list of clusters that have consumed the change.
2538        *It is retained so that the log created by earlier releases (0.94) 
2539        *can be read by the newer releases.
2540        * </pre>
2541        */
2542       @java.lang.Deprecated public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
2543         if (clusterIdBuilder_ == null) {
2544           if (value == null) {
2545             throw new NullPointerException();
2546           }
2547           clusterId_ = value;
2548           onChanged();
2549         } else {
2550           clusterIdBuilder_.setMessage(value);
2551         }
2552         bitField0_ |= 0x00000010;
2553         return this;
2554       }
2555       /**
2556        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2557        *
2558        * <pre>
2559        *
2560        *This parameter is deprecated in favor of clusters which 
2561        *contains the list of clusters that have consumed the change.
2562        *It is retained so that the log created by earlier releases (0.94) 
2563        *can be read by the newer releases.
2564        * </pre>
2565        */
2566       @java.lang.Deprecated public Builder setClusterId(
2567           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
2568         if (clusterIdBuilder_ == null) {
2569           clusterId_ = builderForValue.build();
2570           onChanged();
2571         } else {
2572           clusterIdBuilder_.setMessage(builderForValue.build());
2573         }
2574         bitField0_ |= 0x00000010;
2575         return this;
2576       }
2577       /**
2578        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2579        *
2580        * <pre>
2581        *
2582        *This parameter is deprecated in favor of clusters which 
2583        *contains the list of clusters that have consumed the change.
2584        *It is retained so that the log created by earlier releases (0.94) 
2585        *can be read by the newer releases.
2586        * </pre>
2587        */
2588       @java.lang.Deprecated public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
2589         if (clusterIdBuilder_ == null) {
2590           if (((bitField0_ & 0x00000010) == 0x00000010) &&
2591               clusterId_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) {
2592             clusterId_ =
2593               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial();
2594           } else {
2595             clusterId_ = value;
2596           }
2597           onChanged();
2598         } else {
2599           clusterIdBuilder_.mergeFrom(value);
2600         }
2601         bitField0_ |= 0x00000010;
2602         return this;
2603       }
2604       /**
2605        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2606        *
2607        * <pre>
2608        *
2609        *This parameter is deprecated in favor of clusters which 
2610        *contains the list of clusters that have consumed the change.
2611        *It is retained so that the log created by earlier releases (0.94) 
2612        *can be read by the newer releases.
2613        * </pre>
2614        */
2615       @java.lang.Deprecated public Builder clearClusterId() {
2616         if (clusterIdBuilder_ == null) {
2617           clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
2618           onChanged();
2619         } else {
2620           clusterIdBuilder_.clear();
2621         }
2622         bitField0_ = (bitField0_ & ~0x00000010);
2623         return this;
2624       }
2625       /**
2626        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2627        *
2628        * <pre>
2629        *
2630        *This parameter is deprecated in favor of clusters which 
2631        *contains the list of clusters that have consumed the change.
2632        *It is retained so that the log created by earlier releases (0.94) 
2633        *can be read by the newer releases.
2634        * </pre>
2635        */
2636       @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() {
2637         bitField0_ |= 0x00000010;
2638         onChanged();
2639         return getClusterIdFieldBuilder().getBuilder();
2640       }
2641       /**
2642        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2643        *
2644        * <pre>
2645        *
2646        *This parameter is deprecated in favor of clusters which 
2647        *contains the list of clusters that have consumed the change.
2648        *It is retained so that the log created by earlier releases (0.94) 
2649        *can be read by the newer releases.
2650        * </pre>
2651        */
2652       @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
2653         if (clusterIdBuilder_ != null) {
2654           return clusterIdBuilder_.getMessageOrBuilder();
2655         } else {
2656           return clusterId_;
2657         }
2658       }
2659       /**
2660        * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
2661        *
2662        * <pre>
2663        *
2664        *This parameter is deprecated in favor of clusters which 
2665        *contains the list of clusters that have consumed the change.
2666        *It is retained so that the log created by earlier releases (0.94) 
2667        *can be read by the newer releases.
2668        * </pre>
2669        */
2670       private com.google.protobuf.SingleFieldBuilder<
2671           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
2672           getClusterIdFieldBuilder() {
2673         if (clusterIdBuilder_ == null) {
2674           clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
2675               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
2676                   clusterId_,
2677                   getParentForChildren(),
2678                   isClean());
2679           clusterId_ = null;
2680         }
2681         return clusterIdBuilder_;
2682       }
2683 
2684       // repeated .hbase.pb.FamilyScope scopes = 6;
2685       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_ =
2686         java.util.Collections.emptyList();
2687       private void ensureScopesIsMutable() {
2688         if (!((bitField0_ & 0x00000020) == 0x00000020)) {
2689           scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>(scopes_);
2690           bitField0_ |= 0x00000020;
2691          }
2692       }
2693 
2694       private com.google.protobuf.RepeatedFieldBuilder<
2695           org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_;
2696 
2697       /**
2698        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2699        */
2700       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
2701         if (scopesBuilder_ == null) {
2702           return java.util.Collections.unmodifiableList(scopes_);
2703         } else {
2704           return scopesBuilder_.getMessageList();
2705         }
2706       }
2707       /**
2708        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2709        */
2710       public int getScopesCount() {
2711         if (scopesBuilder_ == null) {
2712           return scopes_.size();
2713         } else {
2714           return scopesBuilder_.getCount();
2715         }
2716       }
2717       /**
2718        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2719        */
2720       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
2721         if (scopesBuilder_ == null) {
2722           return scopes_.get(index);
2723         } else {
2724           return scopesBuilder_.getMessage(index);
2725         }
2726       }
2727       /**
2728        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2729        */
2730       public Builder setScopes(
2731           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
2732         if (scopesBuilder_ == null) {
2733           if (value == null) {
2734             throw new NullPointerException();
2735           }
2736           ensureScopesIsMutable();
2737           scopes_.set(index, value);
2738           onChanged();
2739         } else {
2740           scopesBuilder_.setMessage(index, value);
2741         }
2742         return this;
2743       }
2744       /**
2745        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2746        */
2747       public Builder setScopes(
2748           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
2749         if (scopesBuilder_ == null) {
2750           ensureScopesIsMutable();
2751           scopes_.set(index, builderForValue.build());
2752           onChanged();
2753         } else {
2754           scopesBuilder_.setMessage(index, builderForValue.build());
2755         }
2756         return this;
2757       }
2758       /**
2759        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2760        */
2761       public Builder addScopes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
2762         if (scopesBuilder_ == null) {
2763           if (value == null) {
2764             throw new NullPointerException();
2765           }
2766           ensureScopesIsMutable();
2767           scopes_.add(value);
2768           onChanged();
2769         } else {
2770           scopesBuilder_.addMessage(value);
2771         }
2772         return this;
2773       }
2774       /**
2775        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2776        */
2777       public Builder addScopes(
2778           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) {
2779         if (scopesBuilder_ == null) {
2780           if (value == null) {
2781             throw new NullPointerException();
2782           }
2783           ensureScopesIsMutable();
2784           scopes_.add(index, value);
2785           onChanged();
2786         } else {
2787           scopesBuilder_.addMessage(index, value);
2788         }
2789         return this;
2790       }
2791       /**
2792        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2793        */
2794       public Builder addScopes(
2795           org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
2796         if (scopesBuilder_ == null) {
2797           ensureScopesIsMutable();
2798           scopes_.add(builderForValue.build());
2799           onChanged();
2800         } else {
2801           scopesBuilder_.addMessage(builderForValue.build());
2802         }
2803         return this;
2804       }
2805       /**
2806        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2807        */
2808       public Builder addScopes(
2809           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) {
2810         if (scopesBuilder_ == null) {
2811           ensureScopesIsMutable();
2812           scopes_.add(index, builderForValue.build());
2813           onChanged();
2814         } else {
2815           scopesBuilder_.addMessage(index, builderForValue.build());
2816         }
2817         return this;
2818       }
2819       /**
2820        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2821        */
2822       public Builder addAllScopes(
2823           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> values) {
2824         if (scopesBuilder_ == null) {
2825           ensureScopesIsMutable();
2826           super.addAll(values, scopes_);
2827           onChanged();
2828         } else {
2829           scopesBuilder_.addAllMessages(values);
2830         }
2831         return this;
2832       }
2833       /**
2834        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2835        */
2836       public Builder clearScopes() {
2837         if (scopesBuilder_ == null) {
2838           scopes_ = java.util.Collections.emptyList();
2839           bitField0_ = (bitField0_ & ~0x00000020);
2840           onChanged();
2841         } else {
2842           scopesBuilder_.clear();
2843         }
2844         return this;
2845       }
2846       /**
2847        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2848        */
2849       public Builder removeScopes(int index) {
2850         if (scopesBuilder_ == null) {
2851           ensureScopesIsMutable();
2852           scopes_.remove(index);
2853           onChanged();
2854         } else {
2855           scopesBuilder_.remove(index);
2856         }
2857         return this;
2858       }
2859       /**
2860        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2861        */
2862       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder getScopesBuilder(
2863           int index) {
2864         return getScopesFieldBuilder().getBuilder(index);
2865       }
2866       /**
2867        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2868        */
2869       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
2870           int index) {
2871         if (scopesBuilder_ == null) {
2872           return scopes_.get(index);  } else {
2873           return scopesBuilder_.getMessageOrBuilder(index);
2874         }
2875       }
2876       /**
2877        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2878        */
2879       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
2880            getScopesOrBuilderList() {
2881         if (scopesBuilder_ != null) {
2882           return scopesBuilder_.getMessageOrBuilderList();
2883         } else {
2884           return java.util.Collections.unmodifiableList(scopes_);
2885         }
2886       }
2887       /**
2888        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2889        */
2890       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder() {
2891         return getScopesFieldBuilder().addBuilder(
2892             org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance());
2893       }
2894       /**
2895        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2896        */
2897       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder(
2898           int index) {
2899         return getScopesFieldBuilder().addBuilder(
2900             index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance());
2901       }
2902       /**
2903        * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
2904        */
2905       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder> 
2906            getScopesBuilderList() {
2907         return getScopesFieldBuilder().getBuilderList();
2908       }
2909       private com.google.protobuf.RepeatedFieldBuilder<
2910           org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> 
2911           getScopesFieldBuilder() {
2912         if (scopesBuilder_ == null) {
2913           scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
2914               org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>(
2915                   scopes_,
2916                   ((bitField0_ & 0x00000020) == 0x00000020),
2917                   getParentForChildren(),
2918                   isClean());
2919           scopes_ = null;
2920         }
2921         return scopesBuilder_;
2922       }
2923 
2924       // optional uint32 following_kv_count = 7;
2925       private int followingKvCount_ ;
2926       /**
2927        * <code>optional uint32 following_kv_count = 7;</code>
2928        */
2929       public boolean hasFollowingKvCount() {
2930         return ((bitField0_ & 0x00000040) == 0x00000040);
2931       }
2932       /**
2933        * <code>optional uint32 following_kv_count = 7;</code>
2934        */
2935       public int getFollowingKvCount() {
2936         return followingKvCount_;
2937       }
2938       /**
2939        * <code>optional uint32 following_kv_count = 7;</code>
2940        */
2941       public Builder setFollowingKvCount(int value) {
2942         bitField0_ |= 0x00000040;
2943         followingKvCount_ = value;
2944         onChanged();
2945         return this;
2946       }
2947       /**
2948        * <code>optional uint32 following_kv_count = 7;</code>
2949        */
2950       public Builder clearFollowingKvCount() {
2951         bitField0_ = (bitField0_ & ~0x00000040);
2952         followingKvCount_ = 0;
2953         onChanged();
2954         return this;
2955       }
2956 
2957       // repeated .hbase.pb.UUID cluster_ids = 8;
2958       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> clusterIds_ =
2959         java.util.Collections.emptyList();
2960       private void ensureClusterIdsIsMutable() {
2961         if (!((bitField0_ & 0x00000080) == 0x00000080)) {
2962           clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>(clusterIds_);
2963           bitField0_ |= 0x00000080;
2964          }
2965       }
2966 
2967       private com.google.protobuf.RepeatedFieldBuilder<
2968           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdsBuilder_;
2969 
2970       /**
2971        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
2972        *
2973        * <pre>
2974        *
2975        *This field contains the list of clusters that have
2976        *consumed the change
2977        * </pre>
2978        */
2979       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
2980         if (clusterIdsBuilder_ == null) {
2981           return java.util.Collections.unmodifiableList(clusterIds_);
2982         } else {
2983           return clusterIdsBuilder_.getMessageList();
2984         }
2985       }
2986       /**
2987        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
2988        *
2989        * <pre>
2990        *
2991        *This field contains the list of clusters that have
2992        *consumed the change
2993        * </pre>
2994        */
2995       public int getClusterIdsCount() {
2996         if (clusterIdsBuilder_ == null) {
2997           return clusterIds_.size();
2998         } else {
2999           return clusterIdsBuilder_.getCount();
3000         }
3001       }
3002       /**
3003        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3004        *
3005        * <pre>
3006        *
3007        *This field contains the list of clusters that have
3008        *consumed the change
3009        * </pre>
3010        */
3011       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
3012         if (clusterIdsBuilder_ == null) {
3013           return clusterIds_.get(index);
3014         } else {
3015           return clusterIdsBuilder_.getMessage(index);
3016         }
3017       }
3018       /**
3019        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3020        *
3021        * <pre>
3022        *
3023        *This field contains the list of clusters that have
3024        *consumed the change
3025        * </pre>
3026        */
3027       public Builder setClusterIds(
3028           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
3029         if (clusterIdsBuilder_ == null) {
3030           if (value == null) {
3031             throw new NullPointerException();
3032           }
3033           ensureClusterIdsIsMutable();
3034           clusterIds_.set(index, value);
3035           onChanged();
3036         } else {
3037           clusterIdsBuilder_.setMessage(index, value);
3038         }
3039         return this;
3040       }
3041       /**
3042        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3043        *
3044        * <pre>
3045        *
3046        *This field contains the list of clusters that have
3047        *consumed the change
3048        * </pre>
3049        */
3050       public Builder setClusterIds(
3051           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
3052         if (clusterIdsBuilder_ == null) {
3053           ensureClusterIdsIsMutable();
3054           clusterIds_.set(index, builderForValue.build());
3055           onChanged();
3056         } else {
3057           clusterIdsBuilder_.setMessage(index, builderForValue.build());
3058         }
3059         return this;
3060       }
3061       /**
3062        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3063        *
3064        * <pre>
3065        *
3066        *This field contains the list of clusters that have
3067        *consumed the change
3068        * </pre>
3069        */
3070       public Builder addClusterIds(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
3071         if (clusterIdsBuilder_ == null) {
3072           if (value == null) {
3073             throw new NullPointerException();
3074           }
3075           ensureClusterIdsIsMutable();
3076           clusterIds_.add(value);
3077           onChanged();
3078         } else {
3079           clusterIdsBuilder_.addMessage(value);
3080         }
3081         return this;
3082       }
3083       /**
3084        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3085        *
3086        * <pre>
3087        *
3088        *This field contains the list of clusters that have
3089        *consumed the change
3090        * </pre>
3091        */
3092       public Builder addClusterIds(
3093           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) {
3094         if (clusterIdsBuilder_ == null) {
3095           if (value == null) {
3096             throw new NullPointerException();
3097           }
3098           ensureClusterIdsIsMutable();
3099           clusterIds_.add(index, value);
3100           onChanged();
3101         } else {
3102           clusterIdsBuilder_.addMessage(index, value);
3103         }
3104         return this;
3105       }
3106       /**
3107        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3108        *
3109        * <pre>
3110        *
3111        *This field contains the list of clusters that have
3112        *consumed the change
3113        * </pre>
3114        */
3115       public Builder addClusterIds(
3116           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
3117         if (clusterIdsBuilder_ == null) {
3118           ensureClusterIdsIsMutable();
3119           clusterIds_.add(builderForValue.build());
3120           onChanged();
3121         } else {
3122           clusterIdsBuilder_.addMessage(builderForValue.build());
3123         }
3124         return this;
3125       }
3126       /**
3127        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3128        *
3129        * <pre>
3130        *
3131        *This field contains the list of clusters that have
3132        *consumed the change
3133        * </pre>
3134        */
3135       public Builder addClusterIds(
3136           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) {
3137         if (clusterIdsBuilder_ == null) {
3138           ensureClusterIdsIsMutable();
3139           clusterIds_.add(index, builderForValue.build());
3140           onChanged();
3141         } else {
3142           clusterIdsBuilder_.addMessage(index, builderForValue.build());
3143         }
3144         return this;
3145       }
3146       /**
3147        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3148        *
3149        * <pre>
3150        *
3151        *This field contains the list of clusters that have
3152        *consumed the change
3153        * </pre>
3154        */
3155       public Builder addAllClusterIds(
3156           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> values) {
3157         if (clusterIdsBuilder_ == null) {
3158           ensureClusterIdsIsMutable();
3159           super.addAll(values, clusterIds_);
3160           onChanged();
3161         } else {
3162           clusterIdsBuilder_.addAllMessages(values);
3163         }
3164         return this;
3165       }
3166       /**
3167        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3168        *
3169        * <pre>
3170        *
3171        *This field contains the list of clusters that have
3172        *consumed the change
3173        * </pre>
3174        */
3175       public Builder clearClusterIds() {
3176         if (clusterIdsBuilder_ == null) {
3177           clusterIds_ = java.util.Collections.emptyList();
3178           bitField0_ = (bitField0_ & ~0x00000080);
3179           onChanged();
3180         } else {
3181           clusterIdsBuilder_.clear();
3182         }
3183         return this;
3184       }
3185       /**
3186        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3187        *
3188        * <pre>
3189        *
3190        *This field contains the list of clusters that have
3191        *consumed the change
3192        * </pre>
3193        */
3194       public Builder removeClusterIds(int index) {
3195         if (clusterIdsBuilder_ == null) {
3196           ensureClusterIdsIsMutable();
3197           clusterIds_.remove(index);
3198           onChanged();
3199         } else {
3200           clusterIdsBuilder_.remove(index);
3201         }
3202         return this;
3203       }
3204       /**
3205        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3206        *
3207        * <pre>
3208        *
3209        *This field contains the list of clusters that have
3210        *consumed the change
3211        * </pre>
3212        */
3213       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdsBuilder(
3214           int index) {
3215         return getClusterIdsFieldBuilder().getBuilder(index);
3216       }
3217       /**
3218        * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
3219        *
3220        * <pre>
3221        *
3222        *This field contains the list of clusters that have
3223        *consumed the change
3224        * </pre>
3225        */
3226       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
3227           int index) {
3228         if (clusterIdsBuilder_ == null) {
3229           return clusterIds_.get(index);  } else {
3230           return clusterIdsBuilder_.getMessageOrBuilder(index);
3231         }
3232       }
      /**
       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
       *
       * <pre>
       *
       *This field contains the list of clusters that have
       *consumed the change
       * </pre>
       *
       * Read-only list view over all cluster_ids elements; wraps the plain
       * list in an unmodifiable view when no field builder exists yet.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
           getClusterIdsOrBuilderList() {
        if (clusterIdsBuilder_ != null) {
          return clusterIdsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(clusterIds_);
        }
      }
      /**
       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
       *
       * <pre>
       *
       *This field contains the list of clusters that have
       *consumed the change
       * </pre>
       *
       * Appends a new, default-initialized UUID and returns its sub-builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder() {
        return getClusterIdsFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
       *
       * <pre>
       *
       *This field contains the list of clusters that have
       *consumed the change
       * </pre>
       *
       * Inserts a new, default-initialized UUID at {@code index} and
       * returns its sub-builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder(
          int index) {
        return getClusterIdsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
       *
       * <pre>
       *
       *This field contains the list of clusters that have
       *consumed the change
       * </pre>
       *
       * Returns the full list of mutable sub-builders; forces lazy
       * creation of the repeated-field builder.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder> 
           getClusterIdsBuilderList() {
        return getClusterIdsFieldBuilder().getBuilderList();
      }
      /**
       * Lazily creates the {@code RepeatedFieldBuilder} for cluster_ids.
       * On first call the current list (and its mutability bit 0x00000080)
       * is handed to the builder, and {@code clusterIds_} is nulled so
       * every later access must go through {@code clusterIdsBuilder_}.
       */
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
          getClusterIdsFieldBuilder() {
        if (clusterIdsBuilder_ == null) {
          clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
                  clusterIds_,
                  ((bitField0_ & 0x00000080) == 0x00000080),
                  getParentForChildren(),
                  isClean());
          clusterIds_ = null;
        }
        return clusterIdsBuilder_;
      }
3304 
      // optional uint64 nonceGroup = 9;
      // Presence is tracked via bit 0x00000100 of bitField0_.
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonceGroup = 9;</code>
       *
       * @return true if nonceGroup has been explicitly set on this builder.
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional uint64 nonceGroup = 9;</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonceGroup = 9;</code>
       *
       * Sets the value and marks the field present.
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000100;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonceGroup = 9;</code>
       *
       * Clears the presence bit and restores the proto default (0).
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000100);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }
3337 
      // optional uint64 nonce = 10;
      // Presence is tracked via bit 0x00000200 of bitField0_.
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 10;</code>
       *
       * @return true if nonce has been explicitly set on this builder.
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      /**
       * <code>optional uint64 nonce = 10;</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 10;</code>
       *
       * Sets the value and marks the field present.
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000200;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 10;</code>
       *
       * Clears the presence bit and restores the proto default (0).
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000200);
        nonce_ = 0L;
        onChanged();
        return this;
      }
3370 
      // optional uint64 orig_sequence_number = 11;
      // Presence is tracked via bit 0x00000400 of bitField0_.
      private long origSequenceNumber_ ;
      /**
       * <code>optional uint64 orig_sequence_number = 11;</code>
       *
       * @return true if orig_sequence_number has been explicitly set.
       */
      public boolean hasOrigSequenceNumber() {
        return ((bitField0_ & 0x00000400) == 0x00000400);
      }
      /**
       * <code>optional uint64 orig_sequence_number = 11;</code>
       */
      public long getOrigSequenceNumber() {
        return origSequenceNumber_;
      }
      /**
       * <code>optional uint64 orig_sequence_number = 11;</code>
       *
       * Sets the value and marks the field present.
       */
      public Builder setOrigSequenceNumber(long value) {
        bitField0_ |= 0x00000400;
        origSequenceNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 orig_sequence_number = 11;</code>
       *
       * Clears the presence bit and restores the proto default (0).
       */
      public Builder clearOrigSequenceNumber() {
        bitField0_ = (bitField0_ & ~0x00000400);
        origSequenceNumber_ = 0L;
        onChanged();
        return this;
      }
3403 
3404       // @@protoc_insertion_point(builder_scope:hbase.pb.WALKey)
3405     }
3406 
    // Eagerly create the singleton returned by WALKey.getDefaultInstance().
    static {
      defaultInstance = new WALKey(true);
      defaultInstance.initFields();
    }
3411 
3412     // @@protoc_insertion_point(class_scope:hbase.pb.WALKey)
3413   }
3414 
  /**
   * Accessor interface implemented by both {@code FamilyScope} and its
   * {@code Builder}: presence checks and getters for the two required
   * fields, {@code family} and {@code scope_type}.
   */
  public interface FamilyScopeOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes family = 1;
    /**
     * <code>required bytes family = 1;</code>
     */
    boolean hasFamily();
    /**
     * <code>required bytes family = 1;</code>
     */
    com.google.protobuf.ByteString getFamily();

    // required .hbase.pb.ScopeType scope_type = 2;
    /**
     * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
     */
    boolean hasScopeType();
    /**
     * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType();
  }
3438   /**
3439    * Protobuf type {@code hbase.pb.FamilyScope}
3440    */
  public static final class FamilyScope extends
      com.google.protobuf.GeneratedMessage
      implements FamilyScopeOrBuilder {
    // Use FamilyScope.newBuilder() to construct.
    private FamilyScope(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used only to create the shared default instance.
    private FamilyScope(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FamilyScope defaultInstance;
    public static FamilyScope getDefaultInstance() {
      return defaultInstance;
    }

    public FamilyScope getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not part of this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until end of
     * stream (tag 0). Note the {@code default} arm appears before the field
     * cases; Java switch dispatch is by value, so ordering is irrelevant.
     */
    private FamilyScope(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1 (family), wire type 2 (length-delimited).
              bitField0_ |= 0x00000001;
              family_ = input.readBytes();
              break;
            }
            case 16: {
              // Tag 16 = field 2 (scope_type), wire type 0 (varint enum).
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: preserve it as an unknown field.
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                scopeType_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Whatever was parsed so far is kept, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class);
    }

    public static com.google.protobuf.Parser<FamilyScope> PARSER =
        new com.google.protobuf.AbstractParser<FamilyScope>() {
      public FamilyScope parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FamilyScope(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FamilyScope> getParserForType() {
      return PARSER;
    }

    // Presence bits: 0x1 = family, 0x2 = scope_type.
    private int bitField0_;
    // required bytes family = 1;
    public static final int FAMILY_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString family_;
    /**
     * <code>required bytes family = 1;</code>
     */
    public boolean hasFamily() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes family = 1;</code>
     */
    public com.google.protobuf.ByteString getFamily() {
      return family_;
    }

    // required .hbase.pb.ScopeType scope_type = 2;
    public static final int SCOPE_TYPE_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_;
    /**
     * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
     */
    public boolean hasScopeType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() {
      return scopeType_;
    }

    // Proto defaults: empty bytes, REPLICATION_SCOPE_LOCAL.
    private void initFields() {
      family_ = com.google.protobuf.ByteString.EMPTY;
      scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Both fields are 'required' in the proto; absence means uninitialized.
      if (!hasFamily()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasScopeType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, family_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, scopeType_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size: -1 means not computed yet.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, family_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, scopeType_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field structural equality, including unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) obj;

      boolean result = true;
      result = result && (hasFamily() == other.hasFamily());
      if (hasFamily()) {
        result = result && getFamily()
            .equals(other.getFamily());
      }
      result = result && (hasScopeType() == other.hasScopeType());
      if (hasScopeType()) {
        result = result &&
            (getScopeType() == other.getScopeType());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash (0 = not computed), mixing field numbers and values.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFamily()) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamily().hashCode();
      }
      if (hasScopeType()) {
        hash = (37 * hash) + SCOPE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getScopeType());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.FamilyScope}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets both fields to proto defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        family_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies values and translates the builder's presence bits into the
      // message's bitField0_ without checking required fields.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.family_ = family_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.scopeType_ = scopeType_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Only fields present on 'other' overwrite this builder's values.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()) return this;
        if (other.hasFamily()) {
          setFamily(other.getFamily());
        }
        if (other.hasScopeType()) {
          setScopeType(other.getScopeType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Both fields are required; no memoization at the builder level.
      public final boolean isInitialized() {
        if (!hasFamily()) {
          
          return false;
        }
        if (!hasScopeType()) {
          
          return false;
        }
        return true;
      }

      // Parses from a stream; on failure, merges whatever was parsed before
      // rethrowing so partial data is not lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 = family, 0x2 = scope_type.
      private int bitField0_;

      // required bytes family = 1;
      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family = 1;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }
      /**
       * <code>required bytes family = 1;</code>
       *
       * @throws NullPointerException if {@code value} is null.
       */
      public Builder setFamily(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        family_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public Builder clearFamily() {
        bitField0_ = (bitField0_ & ~0x00000001);
        family_ = getDefaultInstance().getFamily();
        onChanged();
        return this;
      }

      // required .hbase.pb.ScopeType scope_type = 2;
      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
      /**
       * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
       */
      public boolean hasScopeType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() {
        return scopeType_;
      }
      /**
       * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
       *
       * @throws NullPointerException if {@code value} is null.
       */
      public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        scopeType_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
       */
      public Builder clearScopeType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyScope)
    }

    // Eagerly create the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new FamilyScope(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.FamilyScope)
  }
3966 
  /**
   * Accessor interface implemented by both {@code CompactionDescriptor}
   * and its {@code Builder}: presence checks and getters for the
   * compaction-event fields (table/region/family names, input and output
   * file lists, store directory, and optional full region name).
   */
  public interface CompactionDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes table_name = 1;
    /**
     * <code>required bytes table_name = 1;</code>
     *
     * <pre>
     * TODO: WALKey already stores these, might remove
     * </pre>
     */
    boolean hasTableName();
    /**
     * <code>required bytes table_name = 1;</code>
     *
     * <pre>
     * TODO: WALKey already stores these, might remove
     * </pre>
     */
    com.google.protobuf.ByteString getTableName();

    // required bytes encoded_region_name = 2;
    /**
     * <code>required bytes encoded_region_name = 2;</code>
     */
    boolean hasEncodedRegionName();
    /**
     * <code>required bytes encoded_region_name = 2;</code>
     */
    com.google.protobuf.ByteString getEncodedRegionName();

    // required bytes family_name = 3;
    /**
     * <code>required bytes family_name = 3;</code>
     */
    boolean hasFamilyName();
    /**
     * <code>required bytes family_name = 3;</code>
     */
    com.google.protobuf.ByteString getFamilyName();

    // repeated string compaction_input = 4;
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    java.util.List<java.lang.String>
    getCompactionInputList();
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    int getCompactionInputCount();
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    java.lang.String getCompactionInput(int index);
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    com.google.protobuf.ByteString
        getCompactionInputBytes(int index);

    // repeated string compaction_output = 5;
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    java.util.List<java.lang.String>
    getCompactionOutputList();
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    int getCompactionOutputCount();
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    java.lang.String getCompactionOutput(int index);
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    com.google.protobuf.ByteString
        getCompactionOutputBytes(int index);

    // required string store_home_dir = 6;
    /**
     * <code>required string store_home_dir = 6;</code>
     *
     * <pre>
     * relative to region dir
     * </pre>
     */
    boolean hasStoreHomeDir();
    /**
     * <code>required string store_home_dir = 6;</code>
     *
     * <pre>
     * relative to region dir
     * </pre>
     */
    java.lang.String getStoreHomeDir();
    /**
     * <code>required string store_home_dir = 6;</code>
     *
     * <pre>
     * relative to region dir
     * </pre>
     */
    com.google.protobuf.ByteString
        getStoreHomeDirBytes();

    // optional bytes region_name = 7;
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    boolean hasRegionName();
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    com.google.protobuf.ByteString getRegionName();
  }
4109   /**
4110    * Protobuf type {@code hbase.pb.CompactionDescriptor}
4111    *
4112    * <pre>
4113    **
4114    * Special WAL entry to hold all related to a compaction.
4115    * Written to WAL before completing compaction.  There is
4116    * sufficient info in the below message to complete later
4117      * the compaction should we fail the WAL write.
4118    * </pre>
4119    */
4120   public static final class CompactionDescriptor extends
4121       com.google.protobuf.GeneratedMessage
4122       implements CompactionDescriptorOrBuilder {
    // Use CompactionDescriptor.newBuilder() to construct.
    private CompactionDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructs the shared default instance; its unknown-field set is empty.
    private CompactionDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton with every field at its proto default; presumably assigned in the
    // class's static initializer (outside this view) — TODO confirm.
    private static final CompactionDescriptor defaultInstance;
    public static CompactionDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public CompactionDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields present on the wire but unknown to this schema version; preserved
    // so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a serialized CompactionDescriptor from a CodedInputStream.  A wire
    // tag is (field_number << 3) | wire_type; every field of this message is
    // length-delimited (wire type 2), so tag 10 == field 1, 18 == field 2, etc.
    private CompactionDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Tracks which repeated-field lists have been lazily allocated during parse.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the message.
              done = true;
              break;
            default: {
              // Unknown tag: keep the raw field so it survives re-serialization.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              tableName_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              familyName_ = input.readBytes();
              break;
            }
            case 34: {
              // Allocate the list on first element only.
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                compactionInput_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000008;
              }
              compactionInput_.add(input.readBytes());
              break;
            }
            case 42: {
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                compactionOutput_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000010;
              }
              compactionOutput_.add(input.readBytes());
              break;
            }
            case 50: {
              bitField0_ |= 0x00000008;
              storeHomeDir_ = input.readBytes();
              break;
            }
            case 58: {
              bitField0_ |= 0x00000010;
              regionName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze any repeated lists and attach unknown fields even on failure,
        // so the partially-built message handed to setUnfinishedMessage is sane.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionInput_);
        }
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionOutput_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection/descriptor plumbing generated for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class);
    }

    // Stateless parser; delegates to the stream-parsing constructor above.
    public static com.google.protobuf.Parser<CompactionDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<CompactionDescriptor>() {
      public CompactionDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CompactionDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CompactionDescriptor> getParserForType() {
      return PARSER;
    }
4253 
    // Presence bits: 0x01 table_name, 0x02 encoded_region_name, 0x04 family_name,
    // 0x08 store_home_dir, 0x10 region_name (repeated fields have no bit).
    private int bitField0_;
    // required bytes table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString tableName_;
    /**
     * <code>required bytes table_name = 1;</code>
     *
     * <pre>
     * TODO: WALKey already stores these, might remove
     * </pre>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes table_name = 1;</code>
     *
     * <pre>
     * TODO: WALKey already stores these, might remove
     * </pre>
     */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    // required bytes encoded_region_name = 2;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString encodedRegionName_;
    /**
     * <code>required bytes encoded_region_name = 2;</code>
     */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes encoded_region_name = 2;</code>
     */
    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    // required bytes family_name = 3;
    public static final int FAMILY_NAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString familyName_;
    /**
     * <code>required bytes family_name = 3;</code>
     */
    public boolean hasFamilyName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required bytes family_name = 3;</code>
     */
    public com.google.protobuf.ByteString getFamilyName() {
      return familyName_;
    }
4310 
    // repeated string compaction_input = 4;  (store-file paths consumed by the compaction)
    public static final int COMPACTION_INPUT_FIELD_NUMBER = 4;
    // Immutable after construction (wrapped in UnmodifiableLazyStringList by the parser/builder).
    private com.google.protobuf.LazyStringList compactionInput_;
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    public java.util.List<java.lang.String>
        getCompactionInputList() {
      return compactionInput_;
    }
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    public int getCompactionInputCount() {
      return compactionInput_.size();
    }
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    public java.lang.String getCompactionInput(int index) {
      return compactionInput_.get(index);
    }
    /**
     * <code>repeated string compaction_input = 4;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    public com.google.protobuf.ByteString
        getCompactionInputBytes(int index) {
      return compactionInput_.getByteString(index);
    }
4356 
    // repeated string compaction_output = 5;  (store-file paths produced by the compaction)
    public static final int COMPACTION_OUTPUT_FIELD_NUMBER = 5;
    private com.google.protobuf.LazyStringList compactionOutput_;
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    public java.util.List<java.lang.String>
        getCompactionOutputList() {
      return compactionOutput_;
    }
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    public int getCompactionOutputCount() {
      return compactionOutput_.size();
    }
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    public java.lang.String getCompactionOutput(int index) {
      return compactionOutput_.get(index);
    }
    /**
     * <code>repeated string compaction_output = 5;</code>
     */
    public com.google.protobuf.ByteString
        getCompactionOutputBytes(int index) {
      return compactionOutput_.getByteString(index);
    }
4386 
    // required string store_home_dir = 6;
    public static final int STORE_HOME_DIR_FIELD_NUMBER = 6;
    // Holds either a String or a ByteString; decoded/encoded lazily and cached below.
    private java.lang.Object storeHomeDir_;
    /**
     * <code>required string store_home_dir = 6;</code>
     *
     * <pre>
     * relative to region dir
     * </pre>
     */
    public boolean hasStoreHomeDir() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required string store_home_dir = 6;</code>
     *
     * <pre>
     * relative to region dir
     * </pre>
     */
    public java.lang.String getStoreHomeDir() {
      java.lang.Object ref = storeHomeDir_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so a
        // later getStoreHomeDirBytes() can still return the original bytes.
        if (bs.isValidUtf8()) {
          storeHomeDir_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string store_home_dir = 6;</code>
     *
     * <pre>
     * relative to region dir
     * </pre>
     */
    public com.google.protobuf.ByteString
        getStoreHomeDirBytes() {
      java.lang.Object ref = storeHomeDir_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        storeHomeDir_ = b;  // cache the encoded form
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
4441 
    // optional bytes region_name = 7;
    public static final int REGION_NAME_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString regionName_;
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    public boolean hasRegionName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    public com.google.protobuf.ByteString getRegionName() {
      return regionName_;
    }
4465 
    // Resets every field to its proto default (empty bytes / empty string / empty list).
    private void initFields() {
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      familyName_ = com.google.protobuf.ByteString.EMPTY;
      compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      storeHomeDir_ = "";
      regionName_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Cached isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All four proto "required" fields must be present.
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFamilyName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStoreHomeDir()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
4499 
    // Serializes all set fields in field-number order, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure memoized sizes are computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, familyName_);
      }
      // Repeated string fields are written element-by-element, unconditionally.
      for (int i = 0; i < compactionInput_.size(); i++) {
        output.writeBytes(4, compactionInput_.getByteString(i));
      }
      for (int i = 0; i < compactionOutput_.size(); i++) {
        output.writeBytes(5, compactionOutput_.getByteString(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(6, getStoreHomeDirBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(7, regionName_);
      }
      getUnknownFields().writeTo(output);
    }
4526 
    // Cached wire size; -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, familyName_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < compactionInput_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(compactionInput_.getByteString(i));
        }
        size += dataSize;
        // one-byte tag per element (field number 4 encodes in a single byte)
        size += 1 * getCompactionInputList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < compactionOutput_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(compactionOutput_.getByteString(i));
        }
        size += dataSize;
        // one-byte tag per element (field number 5 encodes in a single byte)
        size += 1 * getCompactionOutputList().size();
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, getStoreHomeDirBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, regionName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
4575 
    private static final long serialVersionUID = 0L;
    // Java serialization is routed through the superclass, which substitutes a
    // serialization proxy for this message (see GeneratedMessage.writeReplace).
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
4582 
4583     @java.lang.Override
4584     public boolean equals(final java.lang.Object obj) {
4585       if (obj == this) {
4586        return true;
4587       }
4588       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)) {
4589         return super.equals(obj);
4590       }
4591       org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) obj;
4592 
4593       boolean result = true;
4594       result = result && (hasTableName() == other.hasTableName());
4595       if (hasTableName()) {
4596         result = result && getTableName()
4597             .equals(other.getTableName());
4598       }
4599       result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
4600       if (hasEncodedRegionName()) {
4601         result = result && getEncodedRegionName()
4602             .equals(other.getEncodedRegionName());
4603       }
4604       result = result && (hasFamilyName() == other.hasFamilyName());
4605       if (hasFamilyName()) {
4606         result = result && getFamilyName()
4607             .equals(other.getFamilyName());
4608       }
4609       result = result && getCompactionInputList()
4610           .equals(other.getCompactionInputList());
4611       result = result && getCompactionOutputList()
4612           .equals(other.getCompactionOutputList());
4613       result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
4614       if (hasStoreHomeDir()) {
4615         result = result && getStoreHomeDir()
4616             .equals(other.getStoreHomeDir());
4617       }
4618       result = result && (hasRegionName() == other.hasRegionName());
4619       if (hasRegionName()) {
4620         result = result && getRegionName()
4621             .equals(other.getRegionName());
4622       }
4623       result = result &&
4624           getUnknownFields().equals(other.getUnknownFields());
4625       return result;
4626     }
4627 
4628     private int memoizedHashCode = 0;
4629     @java.lang.Override
4630     public int hashCode() {
4631       if (memoizedHashCode != 0) {
4632         return memoizedHashCode;
4633       }
4634       int hash = 41;
4635       hash = (19 * hash) + getDescriptorForType().hashCode();
4636       if (hasTableName()) {
4637         hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
4638         hash = (53 * hash) + getTableName().hashCode();
4639       }
4640       if (hasEncodedRegionName()) {
4641         hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
4642         hash = (53 * hash) + getEncodedRegionName().hashCode();
4643       }
4644       if (hasFamilyName()) {
4645         hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
4646         hash = (53 * hash) + getFamilyName().hashCode();
4647       }
4648       if (getCompactionInputCount() > 0) {
4649         hash = (37 * hash) + COMPACTION_INPUT_FIELD_NUMBER;
4650         hash = (53 * hash) + getCompactionInputList().hashCode();
4651       }
4652       if (getCompactionOutputCount() > 0) {
4653         hash = (37 * hash) + COMPACTION_OUTPUT_FIELD_NUMBER;
4654         hash = (53 * hash) + getCompactionOutputList().hashCode();
4655       }
4656       if (hasStoreHomeDir()) {
4657         hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
4658         hash = (53 * hash) + getStoreHomeDir().hashCode();
4659       }
4660       if (hasRegionName()) {
4661         hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
4662         hash = (53 * hash) + getRegionName().hashCode();
4663       }
4664       hash = (29 * hash) + getUnknownFields().hashCode();
4665       memoizedHashCode = hash;
4666       return hash;
4667     }
4668 
    // Convenience parseFrom/parseDelimitedFrom overloads; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
4721 
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given message's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Parent-aware builder used internally when this message is nested in another builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
4735     /**
4736      * Protobuf type {@code hbase.pb.CompactionDescriptor}
4737      *
4738      * <pre>
4739      **
4740      * Special WAL entry to hold all related to a compaction.
4741      * Written to WAL before completing compaction.  There is
4742      * sufficient info in the below message to complete later
4743      * the compaction should we fail the WAL write.
4744      * </pre>
4745      */
4746     public static final class Builder extends
4747         com.google.protobuf.GeneratedMessage.Builder<Builder>
4748        implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder {
      // Same descriptor/accessor-table plumbing as the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class);
      }
4760 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // This message has no sub-message fields, so there are no field builders
      // to eagerly initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
4778 
      // Resets every field to its default and clears its builder presence bit.
      // Builder bits differ from message bits: repeated fields occupy 0x08/0x10
      // here, pushing store_home_dir/region_name to 0x20/0x40.
      public Builder clear() {
        super.clear();
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        familyName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        storeHomeDir_ = "";
        bitField0_ = (bitField0_ & ~0x00000020);
        regionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      // Deep copy via round-trip through a partially-built message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
4801 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor;
      }

      // Default instance of the message type this builder produces.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance();
      }
4810 
      // Builds the message, throwing if any required field is unset
      // (see isInitialized(): table_name, encoded_region_name, family_name, store_home_dir).
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
4818 
      // Copies builder state into a new message without enforcing required fields.
      // Builder bits 0x20/0x40 (store_home_dir/region_name) are remapped to message
      // bits 0x08/0x10, because repeated fields consume 0x08/0x10 in the builder only.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.familyName_ = familyName_;
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          // Freeze the repeated list so the built message is immutable; the
          // builder drops its bit and shares the now-unmodifiable list.
          compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList(
              compactionInput_);
          bitField0_ = (bitField0_ & ~0x00000008);
        }
        result.compactionInput_ = compactionInput_;
        if (((bitField0_ & 0x00000010) == 0x00000010)) {
          compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(
              compactionOutput_);
          bitField0_ = (bitField0_ & ~0x00000010);
        }
        result.compactionOutput_ = compactionOutput_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000008;
        }
        result.storeHomeDir_ = storeHomeDir_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000010;
        }
        result.regionName_ = regionName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
4859 
      // Dispatches to the typed merge when possible; otherwise falls back to
      // the reflective field-by-field merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: set scalar fields that are present in `other`,
      // append repeated fields, and keep this builder's value for anything
      // `other` does not have.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (other.hasFamilyName()) {
          setFamilyName(other.getFamilyName());
        }
        if (!other.compactionInput_.isEmpty()) {
          // If our list is still empty we can alias `other`'s immutable list
          // (clearing the mutability bit); otherwise copy-on-write and addAll.
          if (compactionInput_.isEmpty()) {
            compactionInput_ = other.compactionInput_;
            bitField0_ = (bitField0_ & ~0x00000008);
          } else {
            ensureCompactionInputIsMutable();
            compactionInput_.addAll(other.compactionInput_);
          }
          onChanged();
        }
        if (!other.compactionOutput_.isEmpty()) {
          if (compactionOutput_.isEmpty()) {
            compactionOutput_ = other.compactionOutput_;
            bitField0_ = (bitField0_ & ~0x00000010);
          } else {
            ensureCompactionOutputIsMutable();
            compactionOutput_.addAll(other.compactionOutput_);
          }
          onChanged();
        }
        if (other.hasStoreHomeDir()) {
          // Copies the raw Object (String or ByteString) directly to avoid a
          // premature UTF-8 decode; equivalent to setStoreHomeDir semantics.
          bitField0_ |= 0x00000020;
          storeHomeDir_ = other.storeHomeDir_;
          onChanged();
        }
        if (other.hasRegionName()) {
          setRegionName(other.getRegionName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
4911 
      // True only when every `required` proto field has been set; repeated
      // and optional fields (compaction_input/output, region_name) are not
      // checked.
      public final boolean isInitialized() {
        if (!hasTableName()) {
          
          return false;
        }
        if (!hasEncodedRegionName()) {
          
          return false;
        }
        if (!hasFamilyName()) {
          
          return false;
        }
        if (!hasStoreHomeDir()) {
          
          return false;
        }
        return true;
      }
4931 
      // Parses a message from the stream and merges it into this builder.
      // On InvalidProtocolBufferException the partially parsed message is
      // still merged (in the finally block) before the exception propagates,
      // preserving whatever fields were read successfully.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for fields 1-7; bits 0x08/0x10 double as mutability flags
      // for the two repeated lists.
      private int bitField0_;
4950 
      // required bytes table_name = 1;  (has-bit 0x00000001)
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes table_name = 1;</code>
       *
       * <pre>
       * TODO: WALKey already stores these, might remove
       * </pre>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes table_name = 1;</code>
       *
       * <pre>
       * TODO: WALKey already stores these, might remove
       * </pre>
       */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
      /**
       * <code>required bytes table_name = 1;</code>
       *
       * <pre>
       * TODO: WALKey already stores these, might remove
       * </pre>
       */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        tableName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes table_name = 1;</code>
       *
       * <pre>
       * TODO: WALKey already stores these, might remove
       * </pre>
       */
      public Builder clearTableName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }
5002 
      // required bytes encoded_region_name = 2;  (has-bit 0x00000002)
      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes encoded_region_name = 2;</code>
       */
      public boolean hasEncodedRegionName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes encoded_region_name = 2;</code>
       */
      public com.google.protobuf.ByteString getEncodedRegionName() {
        return encodedRegionName_;
      }
      /**
       * <code>required bytes encoded_region_name = 2;</code>
       */
      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        encodedRegionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes encoded_region_name = 2;</code>
       */
      public Builder clearEncodedRegionName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
        onChanged();
        return this;
      }
5038 
      // required bytes family_name = 3;  (has-bit 0x00000004)
      private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family_name = 3;</code>
       */
      public boolean hasFamilyName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required bytes family_name = 3;</code>
       */
      public com.google.protobuf.ByteString getFamilyName() {
        return familyName_;
      }
      /**
       * <code>required bytes family_name = 3;</code>
       */
      public Builder setFamilyName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        familyName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes family_name = 3;</code>
       */
      public Builder clearFamilyName() {
        bitField0_ = (bitField0_ & ~0x00000004);
        familyName_ = getDefaultInstance().getFamilyName();
        onChanged();
        return this;
      }
5074 
      // repeated string compaction_input = 4;
      // Bit 0x00000008 marks the list as privately owned and mutable; while
      // clear the field may alias a shared immutable list.
      private com.google.protobuf.LazyStringList compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: replace a shared list with a private mutable copy
      // before the first mutation.
      private void ensureCompactionInputIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          compactionInput_ = new com.google.protobuf.LazyStringArrayList(compactionInput_);
          bitField0_ |= 0x00000008;
         }
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public java.util.List<java.lang.String>
          getCompactionInputList() {
        return java.util.Collections.unmodifiableList(compactionInput_);
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public int getCompactionInputCount() {
        return compactionInput_.size();
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public java.lang.String getCompactionInput(int index) {
        return compactionInput_.get(index);
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public com.google.protobuf.ByteString
          getCompactionInputBytes(int index) {
        return compactionInput_.getByteString(index);
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder setCompactionInput(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionInputIsMutable();
        compactionInput_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder addCompactionInput(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionInputIsMutable();
        compactionInput_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder addAllCompactionInput(
          java.lang.Iterable<java.lang.String> values) {
        ensureCompactionInputIsMutable();
        super.addAll(values, compactionInput_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder clearCompactionInput() {
        compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_input = 4;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder addCompactionInputBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionInputIsMutable();
        compactionInput_.add(value);
        onChanged();
        return this;
      }
5203 
      // repeated string compaction_output = 5;
      // Bit 0x00000010 marks the list as privately owned and mutable;
      // mirrors the compaction_input handling above.
      private com.google.protobuf.LazyStringList compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: replace a shared list with a private mutable copy
      // before the first mutation.
      private void ensureCompactionOutputIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          compactionOutput_ = new com.google.protobuf.LazyStringArrayList(compactionOutput_);
          bitField0_ |= 0x00000010;
         }
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public java.util.List<java.lang.String>
          getCompactionOutputList() {
        return java.util.Collections.unmodifiableList(compactionOutput_);
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public int getCompactionOutputCount() {
        return compactionOutput_.size();
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public java.lang.String getCompactionOutput(int index) {
        return compactionOutput_.get(index);
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public com.google.protobuf.ByteString
          getCompactionOutputBytes(int index) {
        return compactionOutput_.getByteString(index);
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public Builder setCompactionOutput(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionOutputIsMutable();
        compactionOutput_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public Builder addCompactionOutput(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionOutputIsMutable();
        compactionOutput_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public Builder addAllCompactionOutput(
          java.lang.Iterable<java.lang.String> values) {
        ensureCompactionOutputIsMutable();
        super.addAll(values, compactionOutput_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public Builder clearCompactionOutput() {
        compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string compaction_output = 5;</code>
       */
      public Builder addCompactionOutputBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureCompactionOutputIsMutable();
        compactionOutput_.add(value);
        onChanged();
        return this;
      }
5296 
      // required string store_home_dir = 6;  (has-bit 0x00000020)
      // Stored as Object so it can hold either a String or a ByteString;
      // getters lazily convert and cache in the preferred representation.
      private java.lang.Object storeHomeDir_ = "";
      /**
       * <code>required string store_home_dir = 6;</code>
       *
       * <pre>
       * relative to region dir
       * </pre>
       */
      public boolean hasStoreHomeDir() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>required string store_home_dir = 6;</code>
       *
       * <pre>
       * relative to region dir
       * </pre>
       */
      public java.lang.String getStoreHomeDir() {
        java.lang.Object ref = storeHomeDir_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString once and keep the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          storeHomeDir_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string store_home_dir = 6;</code>
       *
       * <pre>
       * relative to region dir
       * </pre>
       */
      public com.google.protobuf.ByteString
          getStoreHomeDirBytes() {
        java.lang.Object ref = storeHomeDir_;
        if (ref instanceof String) {
          // Encode the cached String once and keep the ByteString.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          storeHomeDir_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string store_home_dir = 6;</code>
       *
       * <pre>
       * relative to region dir
       * </pre>
       */
      public Builder setStoreHomeDir(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string store_home_dir = 6;</code>
       *
       * <pre>
       * relative to region dir
       * </pre>
       */
      public Builder clearStoreHomeDir() {
        bitField0_ = (bitField0_ & ~0x00000020);
        storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
        onChanged();
        return this;
      }
      /**
       * <code>required string store_home_dir = 6;</code>
       *
       * <pre>
       * relative to region dir
       * </pre>
       */
      public Builder setStoreHomeDirBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }
5394 
      // optional bytes region_name = 7;  (has-bit 0x00000040)
      private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public boolean hasRegionName() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public com.google.protobuf.ByteString getRegionName() {
        return regionName_;
      }
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public Builder setRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
        regionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public Builder clearRegionName() {
        bitField0_ = (bitField0_ & ~0x00000040);
        regionName_ = getDefaultInstance().getRegionName();
        onChanged();
        return this;
      }
5446 
5447       // @@protoc_insertion_point(builder_scope:hbase.pb.CompactionDescriptor)
5448     }
5449 
    // Eagerly create and initialize the shared immutable default instance.
    static {
      defaultInstance = new CompactionDescriptor(true);
      defaultInstance.initFields();
    }
5454 
5455     // @@protoc_insertion_point(class_scope:hbase.pb.CompactionDescriptor)
5456   }
5457 
  // Read-only accessor contract shared by FlushDescriptor and its Builder.
  public interface FlushDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.FlushDescriptor.FlushAction action = 1;
    /**
     * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
     */
    boolean hasAction();
    /**
     * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction();

    // required bytes table_name = 2;
    /**
     * <code>required bytes table_name = 2;</code>
     */
    boolean hasTableName();
    /**
     * <code>required bytes table_name = 2;</code>
     */
    com.google.protobuf.ByteString getTableName();

    // required bytes encoded_region_name = 3;
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    boolean hasEncodedRegionName();
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    com.google.protobuf.ByteString getEncodedRegionName();

    // optional uint64 flush_sequence_number = 4;
    /**
     * <code>optional uint64 flush_sequence_number = 4;</code>
     */
    boolean hasFlushSequenceNumber();
    /**
     * <code>optional uint64 flush_sequence_number = 4;</code>
     */
    long getFlushSequenceNumber();

    // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> 
        getStoreFlushesList();
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index);
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    int getStoreFlushesCount();
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> 
        getStoreFlushesOrBuilderList();
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder(
        int index);

    // optional bytes region_name = 6;
    /**
     * <code>optional bytes region_name = 6;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    boolean hasRegionName();
    /**
     * <code>optional bytes region_name = 6;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    com.google.protobuf.ByteString getRegionName();
  }
5544   /**
5545    * Protobuf type {@code hbase.pb.FlushDescriptor}
5546    *
5547    * <pre>
5548    **
5549    * Special WAL entry to hold all related to a flush.
5550    * </pre>
5551    */
5552   public static final class FlushDescriptor extends
5553       com.google.protobuf.GeneratedMessage
5554       implements FlushDescriptorOrBuilder {
    // Use FlushDescriptor.newBuilder() to construct.
    private FlushDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the singleton default instance; skips field parsing.
    private FlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in the class static initializer.
    private static final FlushDescriptor defaultInstance;
    public static FlushDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public FlushDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unrecognized wire-format fields preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0) or an unparseable unknown field. Tag = (field_number << 3) |
    // wire_type, hence 8=action, 18=table_name, 26=encoded_region_name,
    // 32=flush_sequence_number, 42=store_flushes, 50=region_name.
    private FlushDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Unrecognized enum numbers are preserved as unknown fields
              // rather than dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                action_ = value;
              }
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              flushSequenceNumber_ = input.readUInt64();
              break;
            }
            case 42: {
              // Lazily allocate the repeated-message list on first element.
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>();
                mutable_bitField0_ |= 0x00000010;
              }
              storeFlushes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.PARSER, extensionRegistry));
              break;
            }
            case 50: {
              bitField0_ |= 0x00000010;
              regionName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated list and unknown fields even on failure so the
        // unfinished message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.Builder.class);
    }

    // Stateless parser delegating to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<FlushDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<FlushDescriptor>() {
      public FlushDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FlushDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FlushDescriptor> getParserForType() {
      return PARSER;
    }
5680 
5681     /**
5682      * Protobuf enum {@code hbase.pb.FlushDescriptor.FlushAction}
         *
         * <p>Each constant carries (index, value): {@code index} is the position in the
         * descriptor's value list, {@code value} is the proto wire number (identical here).
5683      */
5684     public enum FlushAction
5685         implements com.google.protobuf.ProtocolMessageEnum {
5686       /**
5687        * <code>START_FLUSH = 0;</code>
5688        */
5689       START_FLUSH(0, 0),
5690       /**
5691        * <code>COMMIT_FLUSH = 1;</code>
5692        */
5693       COMMIT_FLUSH(1, 1),
5694       /**
5695        * <code>ABORT_FLUSH = 2;</code>
5696        */
5697       ABORT_FLUSH(2, 2),
5698       /**
5699        * <code>CANNOT_FLUSH = 3;</code>
5700        *
5701        * <pre>
5702        * marker for indicating that a flush has been requested but cannot complete
5703        * </pre>
5704        */
5705       CANNOT_FLUSH(3, 3),
5706       ;
5707 
5708       /**
5709        * <code>START_FLUSH = 0;</code>
5710        */
5711       public static final int START_FLUSH_VALUE = 0;
5712       /**
5713        * <code>COMMIT_FLUSH = 1;</code>
5714        */
5715       public static final int COMMIT_FLUSH_VALUE = 1;
5716       /**
5717        * <code>ABORT_FLUSH = 2;</code>
5718        */
5719       public static final int ABORT_FLUSH_VALUE = 2;
5720       /**
5721        * <code>CANNOT_FLUSH = 3;</code>
5722        *
5723        * <pre>
5724        * marker for indicating that a flush has been requested but cannot complete
5725        * </pre>
5726        */
5727       public static final int CANNOT_FLUSH_VALUE = 3;
5728 
5729 
           // Returns the proto wire number for this constant.
5730       public final int getNumber() { return value; }
5731 
           // Proto2 semantics: an unrecognized wire number maps to null, not an exception.
5732       public static FlushAction valueOf(int value) {
5733         switch (value) {
5734           case 0: return START_FLUSH;
5735           case 1: return COMMIT_FLUSH;
5736           case 2: return ABORT_FLUSH;
5737           case 3: return CANNOT_FLUSH;
5738           default: return null;
5739         }
5740       }
5741 
5742       public static com.google.protobuf.Internal.EnumLiteMap<FlushAction>
5743           internalGetValueMap() {
5744         return internalValueMap;
5745       }
5746       private static com.google.protobuf.Internal.EnumLiteMap<FlushAction>
5747           internalValueMap =
5748             new com.google.protobuf.Internal.EnumLiteMap<FlushAction>() {
5749               public FlushAction findValueByNumber(int number) {
5750                 return FlushAction.valueOf(number);
5751               }
5752             };
5753 
           // index (not the wire number) selects this constant from the descriptor's value list.
5754       public final com.google.protobuf.Descriptors.EnumValueDescriptor
5755           getValueDescriptor() {
5756         return getDescriptor().getValues().get(index);
5757       }
5758       public final com.google.protobuf.Descriptors.EnumDescriptor
5759           getDescriptorForType() {
5760         return getDescriptor();
5761       }
           // First (and only referenced) nested enum type of FlushDescriptor.
5762       public static final com.google.protobuf.Descriptors.EnumDescriptor
5763           getDescriptor() {
5764         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDescriptor().getEnumTypes().get(0);
5765       }
5766 
           // Cached values() array; values() allocates a fresh copy on every call.
5767       private static final FlushAction[] VALUES = values();
5768 
5769       public static FlushAction valueOf(
5770           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
5771         if (desc.getType() != getDescriptor()) {
5772           throw new java.lang.IllegalArgumentException(
5773             "EnumValueDescriptor is not for this type.");
5774         }
5775         return VALUES[desc.getIndex()];
5776       }
5777 
5778       private final int index;
5779       private final int value;
5780 
5781       private FlushAction(int index, int value) {
5782         this.index = index;
5783         this.value = value;
5784       }
5785 
5786       // @@protoc_insertion_point(enum_scope:hbase.pb.FlushDescriptor.FlushAction)
5787     }
5788 
         // Read-only view of a StoreFlushDescriptor; implemented by both the
         // immutable message and its Builder so callers can accept either.
5789     public interface StoreFlushDescriptorOrBuilder
5790         extends com.google.protobuf.MessageOrBuilder {
5791 
5792       // required bytes family_name = 1;
5793       /**
5794        * <code>required bytes family_name = 1;</code>
5795        */
5796       boolean hasFamilyName();
5797       /**
5798        * <code>required bytes family_name = 1;</code>
5799        */
5800       com.google.protobuf.ByteString getFamilyName();
5801 
5802       // required string store_home_dir = 2;
5803       /**
5804        * <code>required string store_home_dir = 2;</code>
5805        *
5806        * <pre>
5807        *relative to region dir
5808        * </pre>
5809        */
5810       boolean hasStoreHomeDir();
5811       /**
5812        * <code>required string store_home_dir = 2;</code>
5813        *
5814        * <pre>
5815        *relative to region dir
5816        * </pre>
5817        */
5818       java.lang.String getStoreHomeDir();
5819       /**
5820        * <code>required string store_home_dir = 2;</code>
5821        *
5822        * <pre>
5823        *relative to region dir
5824        * </pre>
5825        */
5826       com.google.protobuf.ByteString
5827           getStoreHomeDirBytes();
5828 
5829       // repeated string flush_output = 3;
5830       /**
5831        * <code>repeated string flush_output = 3;</code>
5832        *
5833        * <pre>
5834        * relative to store dir (if this is a COMMIT_FLUSH)
5835        * </pre>
5836        */
5837       java.util.List<java.lang.String>
5838       getFlushOutputList();
5839       /**
5840        * <code>repeated string flush_output = 3;</code>
5841        *
5842        * <pre>
5843        * relative to store dir (if this is a COMMIT_FLUSH)
5844        * </pre>
5845        */
5846       int getFlushOutputCount();
5847       /**
5848        * <code>repeated string flush_output = 3;</code>
5849        *
5850        * <pre>
5851        * relative to store dir (if this is a COMMIT_FLUSH)
5852        * </pre>
5853        */
5854       java.lang.String getFlushOutput(int index);
5855       /**
5856        * <code>repeated string flush_output = 3;</code>
5857        *
5858        * <pre>
5859        * relative to store dir (if this is a COMMIT_FLUSH)
5860        * </pre>
5861        */
5862       com.google.protobuf.ByteString
5863           getFlushOutputBytes(int index);
5864     }
5865     /**
5866      * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor}
5867      */
5868     public static final class StoreFlushDescriptor extends
5869         com.google.protobuf.GeneratedMessage
5870         implements StoreFlushDescriptorOrBuilder {
5871       // Use StoreFlushDescriptor.newBuilder() to construct.
5872       private StoreFlushDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5873         super(builder);
5874         this.unknownFields = builder.getUnknownFields();
5875       }
           // Used only for the shared default instance; unknown fields are the empty set.
5876       private StoreFlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5877 
           // Singleton default instance; assigned elsewhere in the class
           // (static initializer not visible in this view).
5878       private static final StoreFlushDescriptor defaultInstance;
5879       public static StoreFlushDescriptor getDefaultInstance() {
5880         return defaultInstance;
5881       }
5882 
5883       public StoreFlushDescriptor getDefaultInstanceForType() {
5884         return defaultInstance;
5885       }
5886 
           // Fields that arrived on the wire but are not declared in the schema.
5887       private final com.google.protobuf.UnknownFieldSet unknownFields;
5888       @java.lang.Override
5889       public final com.google.protobuf.UnknownFieldSet
5890           getUnknownFields() {
5891         return this.unknownFields;
5892       }
           // Parsing constructor: reads the message off the wire tag-by-tag.
           // bitField0_ records which optional/required fields were seen;
           // mutable_bitField0_ bit 0x04 records that the repeated flush_output
           // list was lazily created and must be sealed in the finally block.
5893       private StoreFlushDescriptor(
5894           com.google.protobuf.CodedInputStream input,
5895           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5896           throws com.google.protobuf.InvalidProtocolBufferException {
5897         initFields();
5898         int mutable_bitField0_ = 0;
5899         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
5900             com.google.protobuf.UnknownFieldSet.newBuilder();
5901         try {
5902           boolean done = false;
5903           while (!done) {
5904             int tag = input.readTag();
5905             switch (tag) {
                 // Tag 0 signals end of stream / end of group.
5906               case 0:
5907                 done = true;
5908                 break;
                 // Unrecognized tags are preserved in unknownFields (proto2 behavior).
5909               default: {
5910                 if (!parseUnknownField(input, unknownFields,
5911                                        extensionRegistry, tag)) {
5912                   done = true;
5913                 }
5914                 break;
5915               }
                 // Tag 10 = field 1 (family_name), wire type 2 (length-delimited).
5916               case 10: {
5917                 bitField0_ |= 0x00000001;
5918                 familyName_ = input.readBytes();
5919                 break;
5920               }
                 // Tag 18 = field 2 (store_home_dir); kept as ByteString, decoded lazily.
5921               case 18: {
5922                 bitField0_ |= 0x00000002;
5923                 storeHomeDir_ = input.readBytes();
5924                 break;
5925               }
                 // Tag 26 = field 3 (repeated flush_output); list created on first element.
5926               case 26: {
5927                 if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
5928                   flushOutput_ = new com.google.protobuf.LazyStringArrayList();
5929                   mutable_bitField0_ |= 0x00000004;
5930                 }
5931                 flushOutput_.add(input.readBytes());
5932                 break;
5933               }
5934             }
5935           }
5936         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5937           throw e.setUnfinishedMessage(this);
5938         } catch (java.io.IOException e) {
5939           throw new com.google.protobuf.InvalidProtocolBufferException(
5940               e.getMessage()).setUnfinishedMessage(this);
             // Runs even on error so a partially-parsed message is still immutable.
5941         } finally {
5942           if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
5943             flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(flushOutput_);
5944           }
5945           this.unknownFields = unknownFields.build();
5946           makeExtensionsImmutable();
5947         }
5948       }
           // Descriptor for the nested type hbase.pb.FlushDescriptor.StoreFlushDescriptor.
5949       public static final com.google.protobuf.Descriptors.Descriptor
5950           getDescriptor() {
5951         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor;
5952       }
5953 
           // Maps descriptor fields to the generated accessors for reflective access.
5954       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5955           internalGetFieldAccessorTable() {
5956         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable
5957             .ensureFieldAccessorsInitialized(
5958                 org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class);
5959       }
5960 
           // Delegates to the parsing constructor; non-final by protobuf 2.x convention.
5961       public static com.google.protobuf.Parser<StoreFlushDescriptor> PARSER =
5962           new com.google.protobuf.AbstractParser<StoreFlushDescriptor>() {
5963         public StoreFlushDescriptor parsePartialFrom(
5964             com.google.protobuf.CodedInputStream input,
5965             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5966             throws com.google.protobuf.InvalidProtocolBufferException {
5967           return new StoreFlushDescriptor(input, extensionRegistry);
5968         }
5969       };
5970 
5971       @java.lang.Override
5972       public com.google.protobuf.Parser<StoreFlushDescriptor> getParserForType() {
5973         return PARSER;
5974       }
5975 
           // Presence bits: 0x01 = family_name set, 0x02 = store_home_dir set.
5976       private int bitField0_;
5977       // required bytes family_name = 1;
5978       public static final int FAMILY_NAME_FIELD_NUMBER = 1;
5979       private com.google.protobuf.ByteString familyName_;
5980       /**
5981        * <code>required bytes family_name = 1;</code>
5982        */
5983       public boolean hasFamilyName() {
5984         return ((bitField0_ & 0x00000001) == 0x00000001);
5985       }
5986       /**
5987        * <code>required bytes family_name = 1;</code>
5988        */
5989       public com.google.protobuf.ByteString getFamilyName() {
5990         return familyName_;
5991       }
5992 
5993       // required string store_home_dir = 2;
5994       public static final int STORE_HOME_DIR_FIELD_NUMBER = 2;
           // Holds either a String or a ByteString; converted lazily in each direction.
5995       private java.lang.Object storeHomeDir_;
5996       /**
5997        * <code>required string store_home_dir = 2;</code>
5998        *
5999        * <pre>
6000        *relative to region dir
6001        * </pre>
6002        */
6003       public boolean hasStoreHomeDir() {
6004         return ((bitField0_ & 0x00000002) == 0x00000002);
6005       }
6006       /**
6007        * <code>required string store_home_dir = 2;</code>
6008        *
6009        * <pre>
6010        *relative to region dir
6011        * </pre>
6012        */
6013       public java.lang.String getStoreHomeDir() {
6014         java.lang.Object ref = storeHomeDir_;
6015         if (ref instanceof java.lang.String) {
6016           return (java.lang.String) ref;
6017         } else {
6018           com.google.protobuf.ByteString bs = 
6019               (com.google.protobuf.ByteString) ref;
6020           java.lang.String s = bs.toStringUtf8();
               // Cache the decoded String only when the bytes are valid UTF-8, so a
               // later serialization can still emit the original (possibly invalid) bytes.
6021           if (bs.isValidUtf8()) {
6022             storeHomeDir_ = s;
6023           }
6024           return s;
6025         }
6026       }
6027       /**
6028        * <code>required string store_home_dir = 2;</code>
6029        *
6030        * <pre>
6031        *relative to region dir
6032        * </pre>
6033        */
6034       public com.google.protobuf.ByteString
6035           getStoreHomeDirBytes() {
6036         java.lang.Object ref = storeHomeDir_;
6037         if (ref instanceof java.lang.String) {
               // Cache the UTF-8 encoding back into the field for future calls.
6038           com.google.protobuf.ByteString b = 
6039               com.google.protobuf.ByteString.copyFromUtf8(
6040                   (java.lang.String) ref);
6041           storeHomeDir_ = b;
6042           return b;
6043         } else {
6044           return (com.google.protobuf.ByteString) ref;
6045         }
6046       }
6047 
6048       // repeated string flush_output = 3;
6049       public static final int FLUSH_OUTPUT_FIELD_NUMBER = 3;
6050       private com.google.protobuf.LazyStringList flushOutput_;
6051       /**
6052        * <code>repeated string flush_output = 3;</code>
6053        *
6054        * <pre>
6055        * relative to store dir (if this is a COMMIT_FLUSH)
6056        * </pre>
6057        */
6058       public java.util.List<java.lang.String>
6059           getFlushOutputList() {
6060         return flushOutput_;
6061       }
6062       /**
6063        * <code>repeated string flush_output = 3;</code>
6064        *
6065        * <pre>
6066        * relative to store dir (if this is a COMMIT_FLUSH)
6067        * </pre>
6068        */
6069       public int getFlushOutputCount() {
6070         return flushOutput_.size();
6071       }
6072       /**
6073        * <code>repeated string flush_output = 3;</code>
6074        *
6075        * <pre>
6076        * relative to store dir (if this is a COMMIT_FLUSH)
6077        * </pre>
6078        */
6079       public java.lang.String getFlushOutput(int index) {
6080         return flushOutput_.get(index);
6081       }
6082       /**
6083        * <code>repeated string flush_output = 3;</code>
6084        *
6085        * <pre>
6086        * relative to store dir (if this is a COMMIT_FLUSH)
6087        * </pre>
6088        */
6089       public com.google.protobuf.ByteString
6090           getFlushOutputBytes(int index) {
6091         return flushOutput_.getByteString(index);
6092       }
6093 
           // Defaults applied before parsing; mirrors proto2 field defaults.
6094       private void initFields() {
6095         familyName_ = com.google.protobuf.ByteString.EMPTY;
6096         storeHomeDir_ = "";
6097         flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
6098       }
           // Memoized init check: -1 = not computed, 0 = missing required field, 1 = ok.
6099       private byte memoizedIsInitialized = -1;
6100       public final boolean isInitialized() {
6101         byte isInitialized = memoizedIsInitialized;
6102         if (isInitialized != -1) return isInitialized == 1;
6103 
             // Both proto2 'required' fields must be present.
6104         if (!hasFamilyName()) {
6105           memoizedIsInitialized = 0;
6106           return false;
6107         }
6108         if (!hasStoreHomeDir()) {
6109           memoizedIsInitialized = 0;
6110           return false;
6111         }
6112         memoizedIsInitialized = 1;
6113         return true;
6114       }
6115 
           // Serializes set fields in field-number order; unknown fields go last.
6116       public void writeTo(com.google.protobuf.CodedOutputStream output)
6117                           throws java.io.IOException {
             // Called for its side effect of populating the memoized size.
6118         getSerializedSize();
6119         if (((bitField0_ & 0x00000001) == 0x00000001)) {
6120           output.writeBytes(1, familyName_);
6121         }
6122         if (((bitField0_ & 0x00000002) == 0x00000002)) {
6123           output.writeBytes(2, getStoreHomeDirBytes());
6124         }
6125         for (int i = 0; i < flushOutput_.size(); i++) {
6126           output.writeBytes(3, flushOutput_.getByteString(i));
6127         }
6128         getUnknownFields().writeTo(output);
6129       }
6130 
           // Memoized wire size; -1 = not yet computed. Safe because the message is immutable.
6131       private int memoizedSerializedSize = -1;
6132       public int getSerializedSize() {
6133         int size = memoizedSerializedSize;
6134         if (size != -1) return size;
6135 
6136         size = 0;
6137         if (((bitField0_ & 0x00000001) == 0x00000001)) {
6138           size += com.google.protobuf.CodedOutputStream
6139             .computeBytesSize(1, familyName_);
6140         }
6141         if (((bitField0_ & 0x00000002) == 0x00000002)) {
6142           size += com.google.protobuf.CodedOutputStream
6143             .computeBytesSize(2, getStoreHomeDirBytes());
6144         }
6145         {
6146           int dataSize = 0;
6147           for (int i = 0; i < flushOutput_.size(); i++) {
6148             dataSize += com.google.protobuf.CodedOutputStream
6149               .computeBytesSizeNoTag(flushOutput_.getByteString(i));
6150           }
6151           size += dataSize;
               // Each repeated element carries a 1-byte tag (field 3, wire type 2).
6152           size += 1 * getFlushOutputList().size();
6153         }
6154         size += getUnknownFields().getSerializedSize();
6155         memoizedSerializedSize = size;
6156         return size;
6157       }
6158 
6159       private static final long serialVersionUID = 0L;
6160       @java.lang.Override
           // Java serialization hook; defers to GeneratedMessage's replacement object.
6161       protected java.lang.Object writeReplace()
6162           throws java.io.ObjectStreamException {
6163         return super.writeReplace();
6164       }
6165 
           // Value equality: same presence bits, same field values, same unknown fields.
6166       @java.lang.Override
6167       public boolean equals(final java.lang.Object obj) {
6168         if (obj == this) {
6169          return true;
6170         }
6171         if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)) {
6172           return super.equals(obj);
6173         }
6174         org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) obj;
6175 
6176         boolean result = true;
6177         result = result && (hasFamilyName() == other.hasFamilyName());
6178         if (hasFamilyName()) {
6179           result = result && getFamilyName()
6180               .equals(other.getFamilyName());
6181         }
6182         result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
6183         if (hasStoreHomeDir()) {
6184           result = result && getStoreHomeDir()
6185               .equals(other.getStoreHomeDir());
6186         }
6187         result = result && getFlushOutputList()
6188             .equals(other.getFlushOutputList());
6189         result = result &&
6190             getUnknownFields().equals(other.getUnknownFields());
6191         return result;
6192       }
6193 
           // Memoized hash; 0 doubles as the "not computed" sentinel, so a message
           // whose hash is genuinely 0 would be recomputed each call (harmless).
6194       private int memoizedHashCode = 0;
6195       @java.lang.Override
6196       public int hashCode() {
6197         if (memoizedHashCode != 0) {
6198           return memoizedHashCode;
6199         }
6200         int hash = 41;
6201         hash = (19 * hash) + getDescriptorForType().hashCode();
             // Field numbers are mixed in so set/unset fields hash differently.
6202         if (hasFamilyName()) {
6203           hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
6204           hash = (53 * hash) + getFamilyName().hashCode();
6205         }
6206         if (hasStoreHomeDir()) {
6207           hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
6208           hash = (53 * hash) + getStoreHomeDir().hashCode();
6209         }
6210         if (getFlushOutputCount() > 0) {
6211           hash = (37 * hash) + FLUSH_OUTPUT_FIELD_NUMBER;
6212           hash = (53 * hash) + getFlushOutputList().hashCode();
6213         }
6214         hash = (29 * hash) + getUnknownFields().hashCode();
6215         memoizedHashCode = hash;
6216         return hash;
6217       }
6218 
           // Static parse entry points; all delegate to PARSER (and thus to the
           // parsing constructor). The *Delimited* variants read a length prefix first.
6219       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
6220           com.google.protobuf.ByteString data)
6221           throws com.google.protobuf.InvalidProtocolBufferException {
6222         return PARSER.parseFrom(data);
6223       }
6224       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
6225           com.google.protobuf.ByteString data,
6226           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6227           throws com.google.protobuf.InvalidProtocolBufferException {
6228         return PARSER.parseFrom(data, extensionRegistry);
6229       }
6230       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(byte[] data)
6231           throws com.google.protobuf.InvalidProtocolBufferException {
6232         return PARSER.parseFrom(data);
6233       }
6234       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
6235           byte[] data,
6236           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6237           throws com.google.protobuf.InvalidProtocolBufferException {
6238         return PARSER.parseFrom(data, extensionRegistry);
6239       }
6240       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(java.io.InputStream input)
6241           throws java.io.IOException {
6242         return PARSER.parseFrom(input);
6243       }
6244       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
6245           java.io.InputStream input,
6246           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6247           throws java.io.IOException {
6248         return PARSER.parseFrom(input, extensionRegistry);
6249       }
6250       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(java.io.InputStream input)
6251           throws java.io.IOException {
6252         return PARSER.parseDelimitedFrom(input);
6253       }
6254       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(
6255           java.io.InputStream input,
6256           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6257           throws java.io.IOException {
6258         return PARSER.parseDelimitedFrom(input, extensionRegistry);
6259       }
6260       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
6261           com.google.protobuf.CodedInputStream input)
6262           throws java.io.IOException {
6263         return PARSER.parseFrom(input);
6264       }
6265       public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(
6266           com.google.protobuf.CodedInputStream input,
6267           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6268           throws java.io.IOException {
6269         return PARSER.parseFrom(input, extensionRegistry);
6270       }
6271 
           // Builder factories; newBuilder(prototype) starts from a copy of an existing message.
6272       public static Builder newBuilder() { return Builder.create(); }
6273       public Builder newBuilderForType() { return newBuilder(); }
6274       public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor prototype) {
6275         return newBuilder().mergeFrom(prototype);
6276       }
6277       public Builder toBuilder() { return newBuilder(this); }
6278 
6279       @java.lang.Override
6280       protected Builder newBuilderForType(
6281           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6282         Builder builder = new Builder(parent);
6283         return builder;
6284       }
6285       /**
6286        * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor}
6287        */
6288       public static final class Builder extends
6289           com.google.protobuf.GeneratedMessage.Builder<Builder>
6290          implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder {
             // Descriptor/accessor-table wiring for the Builder, mirroring the message class.
6291         public static final com.google.protobuf.Descriptors.Descriptor
6292             getDescriptor() {
6293           return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor;
6294         }
6295 
6296         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6297             internalGetFieldAccessorTable() {
6298           return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable
6299               .ensureFieldAccessorsInitialized(
6300                   org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class);
6301         }
6302 
6303         // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.newBuilder()
6304         private Builder() {
6305           maybeForceBuilderInitialization();
6306         }
6307 
             // Parent-aware builder used for nested-builder change propagation.
6308         private Builder(
6309             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6310           super(parent);
6311           maybeForceBuilderInitialization();
6312         }
             // No message-typed fields here, so this is a no-op even when
             // alwaysUseFieldBuilders is enabled.
6313         private void maybeForceBuilderInitialization() {
6314           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6315           }
6316         }
6317         private static Builder create() {
6318           return new Builder();
6319         }
6320 
             // Resets every field to its proto2 default and clears all presence bits.
6321         public Builder clear() {
6322           super.clear();
6323           familyName_ = com.google.protobuf.ByteString.EMPTY;
6324           bitField0_ = (bitField0_ & ~0x00000001);
6325           storeHomeDir_ = "";
6326           bitField0_ = (bitField0_ & ~0x00000002);
6327           flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
6328           bitField0_ = (bitField0_ & ~0x00000004);
6329           return this;
6330         }
6331 
             // Deep copy via round-trip through a partially-built message.
6332         public Builder clone() {
6333           return create().mergeFrom(buildPartial());
6334         }
6335 
6336         public com.google.protobuf.Descriptors.Descriptor
6337             getDescriptorForType() {
6338           return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor;
6339         }
6340 
6341         public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstanceForType() {
6342           return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance();
6343         }
6344 
             // Like buildPartial() but rejects messages missing required fields.
6345         public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor build() {
6346           org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = buildPartial();
6347           if (!result.isInitialized()) {
6348             throw newUninitializedMessageException(result);
6349           }
6350           return result;
6351         }
6352 
             // Copies builder state into a new message without the required-field check.
6353         public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor buildPartial() {
6354           org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor(this);
6355           int from_bitField0_ = bitField0_;
6356           int to_bitField0_ = 0;
6357           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6358             to_bitField0_ |= 0x00000001;
6359           }
6360           result.familyName_ = familyName_;
6361           if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
6362             to_bitField0_ |= 0x00000002;
6363           }
6364           result.storeHomeDir_ = storeHomeDir_;
               // Seal the repeated list and clear its mutable bit so the builder and the
               // built message can safely share the same immutable backing list.
6365           if (((bitField0_ & 0x00000004) == 0x00000004)) {
6366             flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(
6367                 flushOutput_);
6368             bitField0_ = (bitField0_ & ~0x00000004);
6369           }
6370           result.flushOutput_ = flushOutput_;
6371           result.bitField0_ = to_bitField0_;
6372           onBuilt();
6373           return result;
6374         }
6375 
             // Generic merge; dispatches to the typed overload when possible,
             // otherwise falls back to reflective field-by-field merging.
6376         public Builder mergeFrom(com.google.protobuf.Message other) {
6377           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) {
6378             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)other);
6379           } else {
6380             super.mergeFrom(other);
6381             return this;
6382           }
6383         }
6384 
             // Typed merge: set fields in 'other' overwrite this builder's scalars;
             // repeated flush_output is appended (or shared if this builder's list is empty).
6385         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other) {
6386           if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()) return this;
6387           if (other.hasFamilyName()) {
6388             setFamilyName(other.getFamilyName());
6389           }
6390           if (other.hasStoreHomeDir()) {
6391             bitField0_ |= 0x00000002;
                 // Copy the raw String/ByteString ref directly to preserve lazy decoding.
6392             storeHomeDir_ = other.storeHomeDir_;
6393             onChanged();
6394           }
6395           if (!other.flushOutput_.isEmpty()) {
6396             if (flushOutput_.isEmpty()) {
                   // Share other's immutable list; mutable bit cleared accordingly.
6397               flushOutput_ = other.flushOutput_;
6398               bitField0_ = (bitField0_ & ~0x00000004);
6399             } else {
6400               ensureFlushOutputIsMutable();
6401               flushOutput_.addAll(other.flushOutput_);
6402             }
6403             onChanged();
6404           }
6405           this.mergeUnknownFields(other.getUnknownFields());
6406           return this;
6407         }
6408 
             // Builder-side init check; no memoization since builder state is mutable.
6409         public final boolean isInitialized() {
6410           if (!hasFamilyName()) {
6411             
6412             return false;
6413           }
6414           if (!hasStoreHomeDir()) {
6415             
6416             return false;
6417           }
6418           return true;
6419         }
6420 
             // Stream merge: on parse failure, whatever was parsed before the error
             // is still merged in (finally block) before the exception propagates.
6421         public Builder mergeFrom(
6422             com.google.protobuf.CodedInputStream input,
6423             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6424             throws java.io.IOException {
6425           org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parsedMessage = null;
6426           try {
6427             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6428           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6429             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) e.getUnfinishedMessage();
6430             throw e;
6431           } finally {
6432             if (parsedMessage != null) {
6433               mergeFrom(parsedMessage);
6434             }
6435           }
6436           return this;
6437         }
        // Presence bits for this builder: bit 0 = family_name set,
        // bit 1 = store_home_dir set, bit 2 = flush_output list is mutable.
        private int bitField0_;

        // required bytes family_name = 1;
        private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>required bytes family_name = 1;</code>
         *
         * @return true if family_name has been explicitly set on this builder.
         */
        public boolean hasFamilyName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required bytes family_name = 1;</code>
         *
         * @return the current value, or ByteString.EMPTY if unset.
         */
        public com.google.protobuf.ByteString getFamilyName() {
          return familyName_;
        }
        /**
         * <code>required bytes family_name = 1;</code>
         *
         * Sets family_name and marks it present. Rejects null.
         */
        public Builder setFamilyName(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
          familyName_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required bytes family_name = 1;</code>
         *
         * Clears the presence bit and restores the default (empty) value.
         */
        public Builder clearFamilyName() {
          bitField0_ = (bitField0_ & ~0x00000001);
          familyName_ = getDefaultInstance().getFamilyName();
          onChanged();
          return this;
        }

        // required string store_home_dir = 2;
        // Stored as either a String or a ByteString; lazily converted and
        // cached in whichever form was last requested.
        private java.lang.Object storeHomeDir_ = "";
        /**
         * <code>required string store_home_dir = 2;</code>
         *
         * <pre>
         *relative to region dir
         * </pre>
         *
         * @return true if store_home_dir has been explicitly set.
         */
        public boolean hasStoreHomeDir() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>required string store_home_dir = 2;</code>
         *
         * <pre>
         *relative to region dir
         * </pre>
         *
         * Returns the value as a String, decoding from UTF-8 and caching the
         * decoded String if it is currently held as a ByteString.
         */
        public java.lang.String getStoreHomeDir() {
          java.lang.Object ref = storeHomeDir_;
          if (!(ref instanceof java.lang.String)) {
            java.lang.String s = ((com.google.protobuf.ByteString) ref)
                .toStringUtf8();
            storeHomeDir_ = s;
            return s;
          } else {
            return (java.lang.String) ref;
          }
        }
        /**
         * <code>required string store_home_dir = 2;</code>
         *
         * <pre>
         *relative to region dir
         * </pre>
         *
         * Returns the value as a ByteString, encoding to UTF-8 and caching the
         * encoded bytes if it is currently held as a String.
         */
        public com.google.protobuf.ByteString
            getStoreHomeDirBytes() {
          java.lang.Object ref = storeHomeDir_;
          if (ref instanceof String) {
            com.google.protobuf.ByteString b = 
                com.google.protobuf.ByteString.copyFromUtf8(
                    (java.lang.String) ref);
            storeHomeDir_ = b;
            return b;
          } else {
            return (com.google.protobuf.ByteString) ref;
          }
        }
        /**
         * <code>required string store_home_dir = 2;</code>
         *
         * <pre>
         *relative to region dir
         * </pre>
         *
         * Sets store_home_dir from a String and marks it present. Rejects null.
         */
        public Builder setStoreHomeDir(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          storeHomeDir_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required string store_home_dir = 2;</code>
         *
         * <pre>
         *relative to region dir
         * </pre>
         *
         * Clears the presence bit and restores the default value.
         */
        public Builder clearStoreHomeDir() {
          bitField0_ = (bitField0_ & ~0x00000002);
          storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
          onChanged();
          return this;
        }
        /**
         * <code>required string store_home_dir = 2;</code>
         *
         * <pre>
         *relative to region dir
         * </pre>
         *
         * Sets store_home_dir from raw UTF-8 bytes without validation.
         */
        public Builder setStoreHomeDirBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
          storeHomeDir_ = value;
          onChanged();
          return this;
        }

        // repeated string flush_output = 3;
        // Starts as the shared immutable EMPTY list; copied to a mutable
        // LazyStringArrayList on first write (copy-on-write via bit 0x04).
        private com.google.protobuf.LazyStringList flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        // Ensures flushOutput_ is a private mutable copy before any mutation.
        private void ensureFlushOutputIsMutable() {
          if (!((bitField0_ & 0x00000004) == 0x00000004)) {
            flushOutput_ = new com.google.protobuf.LazyStringArrayList(flushOutput_);
            bitField0_ |= 0x00000004;
           }
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * @return an unmodifiable view of the current list.
         */
        public java.util.List<java.lang.String>
            getFlushOutputList() {
          return java.util.Collections.unmodifiableList(flushOutput_);
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         */
        public int getFlushOutputCount() {
          return flushOutput_.size();
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         */
        public java.lang.String getFlushOutput(int index) {
          return flushOutput_.get(index);
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * @return the element at {@code index} as raw UTF-8 bytes.
         */
        public com.google.protobuf.ByteString
            getFlushOutputBytes(int index) {
          return flushOutput_.getByteString(index);
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * Replaces the element at {@code index}. Rejects null.
         */
        public Builder setFlushOutput(
            int index, java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  ensureFlushOutputIsMutable();
          flushOutput_.set(index, value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * Appends one element. Rejects null.
         */
        public Builder addFlushOutput(
            java.lang.String value) {
          if (value == null) {
    throw new NullPointerException();
  }
  ensureFlushOutputIsMutable();
          flushOutput_.add(value);
          onChanged();
          return this;
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * Appends all of {@code values} in iteration order.
         */
        public Builder addAllFlushOutput(
            java.lang.Iterable<java.lang.String> values) {
          ensureFlushOutputIsMutable();
          super.addAll(values, flushOutput_);
          onChanged();
          return this;
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * Resets the list to the shared empty instance and clears the
         * mutability bit.
         */
        public Builder clearFlushOutput() {
          flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
          return this;
        }
        /**
         * <code>repeated string flush_output = 3;</code>
         *
         * <pre>
         * relative to store dir (if this is a COMMIT_FLUSH)
         * </pre>
         *
         * Appends one element given as raw UTF-8 bytes without validation.
         */
        public Builder addFlushOutputBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  ensureFlushOutputIsMutable();
          flushOutput_.add(value);
          onChanged();
          return this;
        }
6702 
6703         // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor)
6704       }
6705 
6706       static {
6707         defaultInstance = new StoreFlushDescriptor(true);
6708         defaultInstance.initFields();
6709       }
6710 
6711       // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor)
6712     }
6713 
    // Presence bits for FlushDescriptor fields, assigned in field order;
    // the repeated store_flushes field has no presence bit in the message.
    private int bitField0_;
    // required .hbase.pb.FlushDescriptor.FlushAction action = 1;
    public static final int ACTION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_;
    /**
     * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
     *
     * @return true if action was present on the wire / set by the builder.
     */
    public boolean hasAction() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() {
      return action_;
    }
6730 
    // required bytes table_name = 2;
    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString tableName_;
    /**
     * <code>required bytes table_name = 2;</code>
     *
     * @return true if table_name was present on the wire / set by the builder.
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }
6746 
    // required bytes encoded_region_name = 3;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString encodedRegionName_;
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     *
     * @return true if encoded_region_name was present / set.
     */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }
6762 
    // optional uint64 flush_sequence_number = 4;
    public static final int FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER = 4;
    private long flushSequenceNumber_;
    /**
     * <code>optional uint64 flush_sequence_number = 4;</code>
     *
     * @return true if flush_sequence_number was present / set.
     */
    public boolean hasFlushSequenceNumber() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 flush_sequence_number = 4;</code>
     */
    public long getFlushSequenceNumber() {
      return flushSequenceNumber_;
    }
6778 
    // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;
    // Per-store details of this flush; immutable list in the built message.
    public static final int STORE_FLUSHES_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_;
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() {
      return storeFlushes_;
    }
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     *
     * Same backing list, exposed through the OrBuilder interface.
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> 
        getStoreFlushesOrBuilderList() {
      return storeFlushes_;
    }
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    public int getStoreFlushesCount() {
      return storeFlushes_.size();
    }
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) {
      return storeFlushes_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder(
        int index) {
      return storeFlushes_.get(index);
    }
6814 
    // optional bytes region_name = 6;
    public static final int REGION_NAME_FIELD_NUMBER = 6;
    private com.google.protobuf.ByteString regionName_;
    /**
     * <code>optional bytes region_name = 6;</code>
     *
     * <pre>
     * full region name
     * </pre>
     *
     * @return true if region_name was present / set. Note this field uses
     * message presence bit 0x10 (store_flushes, being repeated, has none).
     */
    public boolean hasRegionName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bytes region_name = 6;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    public com.google.protobuf.ByteString getRegionName() {
      return regionName_;
    }
6838 
    /** Resets every field to its proto default; used by the default instance. */
    private void initFields() {
      action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      flushSequenceNumber_ = 0L;
      storeFlushes_ = java.util.Collections.emptyList();
      regionName_ = com.google.protobuf.ByteString.EMPTY;
    }
6847     private byte memoizedIsInitialized = -1;
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * Checks that all required fields (action, table_name, encoded_region_name)
     * are set and that every nested store_flushes element is itself
     * initialized. The result is cached in memoizedIsInitialized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasAction()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getStoreFlushesCount(); i++) {
        if (!getStoreFlushes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
6873 
    /**
     * Serializes this message to {@code output} in field-number order,
     * writing only the fields whose presence bits are set (repeated
     * store_flushes is always written element by element).
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensures nested message sizes are memoized before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, action_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, flushSequenceNumber_);
      }
      for (int i = 0; i < storeFlushes_.size(); i++) {
        output.writeMessage(5, storeFlushes_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(6, regionName_);
      }
      // Preserve any fields that were unknown at parse time.
      getUnknownFields().writeTo(output);
    }
6897 
    // Cached wire size of this message; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and memoizes) the serialized byte size of this message,
     * summing tag+value sizes for each present field plus unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, action_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, flushSequenceNumber_);
      }
      for (int i = 0; i < storeFlushes_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, storeFlushes_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, regionName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
6932 
    private static final long serialVersionUID = 0L;
    /** Delegates Java serialization to the GeneratedMessage proxy form. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
6939 
    /**
     * Field-by-field equality: two FlushDescriptors are equal when each
     * field has the same presence and, if present, the same value, and their
     * unknown field sets match. Enum fields compare by reference, which is
     * safe because generated enum values are singletons.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) obj;

      boolean result = true;
      result = result && (hasAction() == other.hasAction());
      if (hasAction()) {
        result = result &&
            (getAction() == other.getAction());
      }
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
      if (hasEncodedRegionName()) {
        result = result && getEncodedRegionName()
            .equals(other.getEncodedRegionName());
      }
      result = result && (hasFlushSequenceNumber() == other.hasFlushSequenceNumber());
      if (hasFlushSequenceNumber()) {
        result = result && (getFlushSequenceNumber()
            == other.getFlushSequenceNumber());
      }
      result = result && getStoreFlushesList()
          .equals(other.getStoreFlushesList());
      result = result && (hasRegionName() == other.hasRegionName());
      if (hasRegionName()) {
        result = result && getRegionName()
            .equals(other.getRegionName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
6982 
    // Cached hash; 0 means not yet computed (a real hash of 0 is recomputed).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes in each present field keyed by its
     * field number, plus the descriptor type and unknown fields. Memoized.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasAction()) {
        hash = (37 * hash) + ACTION_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getAction());
      }
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (hasFlushSequenceNumber()) {
        hash = (37 * hash) + FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFlushSequenceNumber());
      }
      if (getStoreFlushesCount() > 0) {
        hash = (37 * hash) + STORE_FLUSHES_FIELD_NUMBER;
        hash = (53 * hash) + getStoreFlushesList().hashCode();
      }
      if (hasRegionName()) {
        hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getRegionName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7019 
    // Static parse entry points. All delegate to PARSER, which performs the
    // wire-format decoding; the non-delimited variants consume the whole
    // input, the delimited ones read a length-prefixed message.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
7072 
    /** @return a new builder with every field at its proto default. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** @return a new builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** @return a builder initialized from this message's current fields. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
7086     /**
7087      * Protobuf type {@code hbase.pb.FlushDescriptor}
7088      *
7089      * <pre>
7090      **
7091      * Special WAL entry to hold all related to a flush.
7092      * </pre>
7093      */
7094     public static final class Builder extends
7095         com.google.protobuf.GeneratedMessage.Builder<Builder>
7096        implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptorOrBuilder {
      /** @return the protobuf descriptor for hbase.pb.FlushDescriptor. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor;
      }

      // Binds reflective field accessors to the message and builder classes.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.Builder.class);
      }
7108 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-aware constructor used for nested-builder change propagation.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the store_flushes field builder when the runtime
      // always uses field builders (i.e. when change notification is needed).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStoreFlushesFieldBuilder();
        }
      }
      /** Factory used by FlushDescriptor.newBuilder(). */
      private static Builder create() {
        return new Builder();
      }
7127 
      /**
       * Resets every field to its proto default and clears all presence bits
       * (builder bits: 0x01 action, 0x02 table_name, 0x04 encoded_region_name,
       * 0x08 flush_sequence_number, 0x10 store_flushes, 0x20 region_name).
       */
      public Builder clear() {
        super.clear();
        action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        flushSequenceNumber_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (storeFlushesBuilder_ == null) {
          storeFlushes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          storeFlushesBuilder_.clear();
        }
        regionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
7148 
      /** Deep copy of this builder via a round-trip through buildPartial(). */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance();
      }
7161 
      /**
       * Builds the message, throwing UninitializedMessageException if any
       * required field is missing (use buildPartial() to skip that check).
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
7169 
      /**
       * Builds the message without checking required fields. Copies each
       * field and translates builder presence bits to message presence bits;
       * they differ from bit 0x20 on because the repeated store_flushes field
       * uses builder bit 0x10 but has no presence bit in the message, so
       * region_name maps from builder bit 0x20 to message bit 0x10.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.action_ = action_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.flushSequenceNumber_ = flushSequenceNumber_;
        if (storeFlushesBuilder_ == null) {
          // Freeze the builder's list into the message; the builder will
          // copy-on-write if mutated again later.
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.storeFlushes_ = storeFlushes_;
        } else {
          result.storeFlushes_ = storeFlushesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000010;
        }
        result.regionName_ = regionName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
7207 
      /**
       * Merges another message into this builder, dispatching to the typed
       * overload when {@code other} is a FlushDescriptor, otherwise falling
       * back to the reflective GeneratedMessage merge.
       */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
7216 
      /**
       * Typed merge: copies each field of {@code other} that is present,
       * overwriting singular fields and appending repeated store_flushes.
       * The repeated merge has two paths depending on whether this builder
       * is backed by a plain list or a RepeatedFieldBuilder.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance()) return this;
        if (other.hasAction()) {
          setAction(other.getAction());
        }
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (other.hasFlushSequenceNumber()) {
          setFlushSequenceNumber(other.getFlushSequenceNumber());
        }
        if (storeFlushesBuilder_ == null) {
          if (!other.storeFlushes_.isEmpty()) {
            if (storeFlushes_.isEmpty()) {
              // Adopt the other message's (immutable) list directly; it will
              // be copied on the next mutation.
              storeFlushes_ = other.storeFlushes_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureStoreFlushesIsMutable();
              storeFlushes_.addAll(other.storeFlushes_);
            }
            onChanged();
          }
        } else {
          if (!other.storeFlushes_.isEmpty()) {
            if (storeFlushesBuilder_.isEmpty()) {
              // Empty field builder: drop it and adopt the other list, then
              // re-create the builder if the runtime requires one.
              storeFlushesBuilder_.dispose();
              storeFlushesBuilder_ = null;
              storeFlushes_ = other.storeFlushes_;
              bitField0_ = (bitField0_ & ~0x00000010);
              storeFlushesBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getStoreFlushesFieldBuilder() : null;
            } else {
              storeFlushesBuilder_.addAllMessages(other.storeFlushes_);
            }
          }
        }
        if (other.hasRegionName()) {
          setRegionName(other.getRegionName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
7263 
7264       public final boolean isInitialized() {
7265         if (!hasAction()) {
7266           
7267           return false;
7268         }
7269         if (!hasTableName()) {
7270           
7271           return false;
7272         }
7273         if (!hasEncodedRegionName()) {
7274           
7275           return false;
7276         }
7277         for (int i = 0; i < getStoreFlushesCount(); i++) {
7278           if (!getStoreFlushes(i).isInitialized()) {
7279             
7280             return false;
7281           }
7282         }
7283         return true;
7284       }
7285 
      /**
       * Parses a FlushDescriptor from the stream and merges it into this
       * builder.
       *
       * @throws java.io.IOException on malformed input; any fields parsed
       *         before the failure are still merged (see the finally block).
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover the partially-parsed message so its fields are not lost,
          // then rethrow to report the failure to the caller.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Tracks builder state, one bit per field:
      // 0x01 action, 0x02 table_name, 0x04 encoded_region_name,
      // 0x08 flush_sequence_number, 0x10 store_flushes list is mutable,
      // 0x20 region_name.
      private int bitField0_;

      // required .hbase.pb.FlushDescriptor.FlushAction action = 1;
      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
      /**
       * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
       */
      public boolean hasAction() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() {
        return action_;
      }
      /**
       * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
       */
      public Builder setAction(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        action_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code>
       */
      public Builder clearAction() {
        // Clear the has-bit and restore the proto default (START_FLUSH).
        bitField0_ = (bitField0_ & ~0x00000001);
        action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH;
        onChanged();
        return this;
      }
7340 
      // required bytes table_name = 2;
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        tableName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public Builder clearTableName() {
        // Clear the has-bit and reset to the default instance's value.
        bitField0_ = (bitField0_ & ~0x00000002);
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }
7376 
      // required bytes encoded_region_name = 3;
      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public boolean hasEncodedRegionName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public com.google.protobuf.ByteString getEncodedRegionName() {
        return encodedRegionName_;
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        encodedRegionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public Builder clearEncodedRegionName() {
        // Clear the has-bit and reset to the default instance's value.
        bitField0_ = (bitField0_ & ~0x00000004);
        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
        onChanged();
        return this;
      }
7412 
      // optional uint64 flush_sequence_number = 4;
      private long flushSequenceNumber_ ;
      /**
       * <code>optional uint64 flush_sequence_number = 4;</code>
       */
      public boolean hasFlushSequenceNumber() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 flush_sequence_number = 4;</code>
       */
      public long getFlushSequenceNumber() {
        return flushSequenceNumber_;
      }
      /**
       * <code>optional uint64 flush_sequence_number = 4;</code>
       */
      public Builder setFlushSequenceNumber(long value) {
        bitField0_ |= 0x00000008;
        flushSequenceNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 flush_sequence_number = 4;</code>
       */
      public Builder clearFlushSequenceNumber() {
        // Clear the has-bit and reset to the proto default (0).
        bitField0_ = (bitField0_ & ~0x00000008);
        flushSequenceNumber_ = 0L;
        onChanged();
        return this;
      }
7445 
      // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;
      // The repeated field has two alternative representations: the plain list
      // below (used until a nested builder is requested) or the
      // RepeatedFieldBuilder. Once storeFlushesBuilder_ is non-null it is the
      // authoritative store and storeFlushes_ is no longer consulted.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: bit 0x10 records that storeFlushes_ is a private
      // mutable ArrayList. When the bit is clear the list may be shared or
      // immutable, so it is copied here before any mutation.
      private void ensureStoreFlushesIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>(storeFlushes_);
          bitField0_ |= 0x00000010;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> storeFlushesBuilder_;

      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() {
        if (storeFlushesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(storeFlushes_);
        } else {
          return storeFlushesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public int getStoreFlushesCount() {
        if (storeFlushesBuilder_ == null) {
          return storeFlushes_.size();
        } else {
          return storeFlushesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) {
        if (storeFlushesBuilder_ == null) {
          return storeFlushes_.get(index);
        } else {
          return storeFlushesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      // All mutators below follow the same pattern: operate on the plain list
      // (after ensuring it is privately mutable) when no nested builder is
      // active, otherwise delegate to storeFlushesBuilder_.
      public Builder setStoreFlushes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) {
        if (storeFlushesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoreFlushesIsMutable();
          storeFlushes_.set(index, value);
          onChanged();
        } else {
          storeFlushesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder setStoreFlushes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) {
        if (storeFlushesBuilder_ == null) {
          ensureStoreFlushesIsMutable();
          storeFlushes_.set(index, builderForValue.build());
          onChanged();
        } else {
          storeFlushesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder addStoreFlushes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) {
        if (storeFlushesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoreFlushesIsMutable();
          storeFlushes_.add(value);
          onChanged();
        } else {
          storeFlushesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder addStoreFlushes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) {
        if (storeFlushesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoreFlushesIsMutable();
          storeFlushes_.add(index, value);
          onChanged();
        } else {
          storeFlushesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder addStoreFlushes(
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) {
        if (storeFlushesBuilder_ == null) {
          ensureStoreFlushesIsMutable();
          storeFlushes_.add(builderForValue.build());
          onChanged();
        } else {
          storeFlushesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder addStoreFlushes(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) {
        if (storeFlushesBuilder_ == null) {
          ensureStoreFlushesIsMutable();
          storeFlushes_.add(index, builderForValue.build());
          onChanged();
        } else {
          storeFlushesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder addAllStoreFlushes(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> values) {
        if (storeFlushesBuilder_ == null) {
          ensureStoreFlushesIsMutable();
          // GeneratedMessage.Builder.addAll null-checks each element while
          // appending into the target list.
          super.addAll(values, storeFlushes_);
          onChanged();
        } else {
          storeFlushesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder clearStoreFlushes() {
        if (storeFlushesBuilder_ == null) {
          // Drop back to the shared empty list and clear the mutable bit.
          storeFlushes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
          onChanged();
        } else {
          storeFlushesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public Builder removeStoreFlushes(int index) {
        if (storeFlushesBuilder_ == null) {
          ensureStoreFlushesIsMutable();
          storeFlushes_.remove(index);
          onChanged();
        } else {
          storeFlushesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      // Requesting a nested builder switches the field to builder-backed mode
      // (see getStoreFlushesFieldBuilder below).
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder getStoreFlushesBuilder(
          int index) {
        return getStoreFlushesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder(
          int index) {
        if (storeFlushesBuilder_ == null) {
          return storeFlushes_.get(index);  } else {
          return storeFlushesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> 
           getStoreFlushesOrBuilderList() {
        if (storeFlushesBuilder_ != null) {
          return storeFlushesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(storeFlushes_);
        }
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder() {
        return getStoreFlushesFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder(
          int index) {
        return getStoreFlushesFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder> 
           getStoreFlushesBuilderList() {
        return getStoreFlushesFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder, seeding it with the current
      // list; afterwards storeFlushes_ is nulled out because the builder is
      // the sole owner of the field's contents.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> 
          getStoreFlushesFieldBuilder() {
        if (storeFlushesBuilder_ == null) {
          storeFlushesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>(
                  storeFlushes_,
                  ((bitField0_ & 0x00000010) == 0x00000010),
                  getParentForChildren(),
                  isClean());
          storeFlushes_ = null;
        }
        return storeFlushesBuilder_;
      }
7685 
      // optional bytes region_name = 6;
      private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes region_name = 6;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public boolean hasRegionName() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional bytes region_name = 6;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public com.google.protobuf.ByteString getRegionName() {
        return regionName_;
      }
      /**
       * <code>optional bytes region_name = 6;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public Builder setRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        regionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes region_name = 6;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public Builder clearRegionName() {
        // Clear the has-bit and reset to the default instance's value.
        bitField0_ = (bitField0_ & ~0x00000020);
        regionName_ = getDefaultInstance().getRegionName();
        onChanged();
        return this;
      }
7737 
7738       // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor)
7739     }
7740 
    static {
      // Eagerly build the singleton returned by getDefaultInstance().  The
      // boolean-arg constructor skips normal initialization, so defaults are
      // applied explicitly via initFields().
      defaultInstance = new FlushDescriptor(true);
      defaultInstance.initFields();
    }
7745 
7746     // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor)
7747   }
7748 
  /**
   * Read accessors shared by {@code StoreDescriptor} and its {@code Builder}
   * for the {@code hbase.pb.StoreDescriptor} message.
   */
  public interface StoreDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes family_name = 1;
    /**
     * <code>required bytes family_name = 1;</code>
     */
    boolean hasFamilyName();
    /**
     * <code>required bytes family_name = 1;</code>
     */
    com.google.protobuf.ByteString getFamilyName();

    // required string store_home_dir = 2;
    /**
     * <code>required string store_home_dir = 2;</code>
     *
     * <pre>
     *relative to region dir
     * </pre>
     */
    boolean hasStoreHomeDir();
    /**
     * <code>required string store_home_dir = 2;</code>
     *
     * <pre>
     *relative to region dir
     * </pre>
     */
    java.lang.String getStoreHomeDir();
    /**
     * <code>required string store_home_dir = 2;</code>
     *
     * <pre>
     *relative to region dir
     * </pre>
     */
    com.google.protobuf.ByteString
        getStoreHomeDirBytes();

    // repeated string store_file = 3;
    /**
     * <code>repeated string store_file = 3;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    java.util.List<java.lang.String>
    getStoreFileList();
    /**
     * <code>repeated string store_file = 3;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    int getStoreFileCount();
    /**
     * <code>repeated string store_file = 3;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    java.lang.String getStoreFile(int index);
    /**
     * <code>repeated string store_file = 3;</code>
     *
     * <pre>
     * relative to store dir
     * </pre>
     */
    com.google.protobuf.ByteString
        getStoreFileBytes(int index);
  }
7825   /**
7826    * Protobuf type {@code hbase.pb.StoreDescriptor}
7827    */
7828   public static final class StoreDescriptor extends
7829       com.google.protobuf.GeneratedMessage
7830       implements StoreDescriptorOrBuilder {
    // Use StoreDescriptor.newBuilder() to construct.
    private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the singleton default instance;
    // field defaults are set separately via initFields().
    private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7837 
    // Singleton default instance, created in the class's static initializer.
    private static final StoreDescriptor defaultInstance;
    public static StoreDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public StoreDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }
7846 
    // Fields seen on the wire that this class's schema does not know about;
    // preserved so reserialization does not lose data.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor.  Reads tag/value pairs until end of
     * stream, storing known fields and funnelling everything else into
     * unknownFields.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on malformed
     *         input; the partially-built message is attached to the exception.
     */
    private StoreDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          // Each tag encodes (field_number << 3) | wire_type.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / enclosing group.
              done = true;
              break;
            default: {
              // Unknown field: preserve it, or stop if it cannot be parsed.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // family_name = 1 (length-delimited).
              bitField0_ |= 0x00000001;
              familyName_ = input.readBytes();
              break;
            }
            case 18: {
              // store_home_dir = 2 (length-delimited); kept as ByteString and
              // decoded lazily by getStoreHomeDir().
              bitField0_ |= 0x00000002;
              storeHomeDir_ = input.readBytes();
              break;
            }
            case 26: {
              // store_file = 3 (repeated string): allocate the list on first
              // occurrence, tracked by local bit 0x04.
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                storeFile_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000004;
              }
              storeFile_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field (even on error) and seal unknown fields.
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor and reflection plumbing; backed by the file-level tables
    // defined at the bottom of WALProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class);
    }
7920 
    // Stateless parser delegating to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<StoreDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<StoreDescriptor>() {
      public StoreDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StoreDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StoreDescriptor> getParserForType() {
      return PARSER;
    }
7935 
    // Has-bits for singular fields: 0x01 family_name, 0x02 store_home_dir.
    private int bitField0_;
    // required bytes family_name = 1;
    public static final int FAMILY_NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString familyName_;
    /**
     * <code>required bytes family_name = 1;</code>
     */
    public boolean hasFamilyName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes family_name = 1;</code>
     */
    public com.google.protobuf.ByteString getFamilyName() {
      return familyName_;
    }
7952 
    // required string store_home_dir = 2;
    public static final int STORE_HOME_DIR_FIELD_NUMBER = 2;
    // Holds either a java.lang.String or a ByteString: parsing stores the raw
    // ByteString and the accessors convert (and cache) on demand.
    private java.lang.Object storeHomeDir_;
    /**
     * <code>required string store_home_dir = 2;</code>
     *
     * <pre>
     *relative to region dir
     * </pre>
     */
    public boolean hasStoreHomeDir() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required string store_home_dir = 2;</code>
     *
     * <pre>
     *relative to region dir
     * </pre>
     */
    public java.lang.String getStoreHomeDir() {
      java.lang.Object ref = storeHomeDir_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form only when the bytes are valid UTF-8, so an
        // invalid payload keeps round-tripping the original bytes.
        if (bs.isValidUtf8()) {
          storeHomeDir_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string store_home_dir = 2;</code>
     *
     * <pre>
     *relative to region dir
     * </pre>
     */
    public com.google.protobuf.ByteString
        getStoreHomeDirBytes() {
      java.lang.Object ref = storeHomeDir_;
      if (ref instanceof java.lang.String) {
        // Encode and cache the ByteString form for serialization.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        storeHomeDir_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
8007 
8008     // repeated string store_file = 3;
8009     public static final int STORE_FILE_FIELD_NUMBER = 3;
8010     private com.google.protobuf.LazyStringList storeFile_;
8011     /**
8012      * <code>repeated string store_file = 3;</code>
8013      *
8014      * <pre>
8015      * relative to store dir
8016      * </pre>
8017      */
8018     public java.util.List<java.lang.String>
8019         getStoreFileList() {
8020       return storeFile_;
8021     }
8022     /**
8023      * <code>repeated string store_file = 3;</code>
8024      *
8025      * <pre>
8026      * relative to store dir
8027      * </pre>
8028      */
8029     public int getStoreFileCount() {
8030       return storeFile_.size();
8031     }
8032     /**
8033      * <code>repeated string store_file = 3;</code>
8034      *
8035      * <pre>
8036      * relative to store dir
8037      * </pre>
8038      */
8039     public java.lang.String getStoreFile(int index) {
8040       return storeFile_.get(index);
8041     }
8042     /**
8043      * <code>repeated string store_file = 3;</code>
8044      *
8045      * <pre>
8046      * relative to store dir
8047      * </pre>
8048      */
8049     public com.google.protobuf.ByteString
8050         getStoreFileBytes(int index) {
8051       return storeFile_.getByteString(index);
8052     }
8053 
    // Resets every field to its proto default; called from the constructors.
    private void initFields() {
      familyName_ = com.google.protobuf.ByteString.EMPTY;
      storeHomeDir_ = "";
      storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Both required fields (family_name, store_home_dir) must be present.
      if (!hasFamilyName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStoreHomeDir()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
8075 
    // Serializes the set fields to the wire in field-number order, followed
    // by any unknown fields carried over from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Populates memoizedSerializedSize before writing (protobuf convention).
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, familyName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getStoreHomeDirBytes());
      }
      for (int i = 0; i < storeFile_.size(); i++) {
        output.writeBytes(3, storeFile_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, familyName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getStoreHomeDirBytes());
      }
      {
        int dataSize = 0;
        for (int i = 0; i < storeFile_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(storeFile_.getByteString(i));
        }
        size += dataSize;
        // One byte of tag overhead per store_file entry (field 3 tag fits in one byte).
        size += 1 * getStoreFileList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
8118 
    private static final long serialVersionUID = 0L;
    // Java serialization is redirected through the protobuf-provided proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over presence bits, field values, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) obj;

      boolean result = true;
      result = result && (hasFamilyName() == other.hasFamilyName());
      if (hasFamilyName()) {
        result = result && getFamilyName()
            .equals(other.getFamilyName());
      }
      result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
      if (hasStoreHomeDir()) {
        result = result && getStoreHomeDir()
            .equals(other.getStoreHomeDir());
      }
      result = result && getStoreFileList()
          .equals(other.getStoreFileList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    // Hash consistent with equals(): mixes field numbers and values of set fields.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFamilyName()) {
        hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getFamilyName().hashCode();
      }
      if (hasStoreHomeDir()) {
        hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
        hash = (53 * hash) + getStoreHomeDir().hashCode();
      }
      if (getStoreFileCount() > 0) {
        hash = (37 * hash) + STORE_FILE_FIELD_NUMBER;
        hash = (53 * hash) + getStoreFileList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
8178 
    // Static parse entry points; all delegate to the shared PARSER instance.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.StoreDescriptor}
     *
     * Mutable builder; bitField0_ tracks presence bits 0-1 for the required
     * fields and bit 2 for "storeFile_ list is privately owned and mutable".
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields in this type, so there is nothing to
      // force-initialize; the empty body is intentional.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        familyName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        storeHomeDir_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Copy presence bits for the singular fields into the message.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.familyName_ = familyName_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.storeHomeDir_ = storeHomeDir_;
        // Freeze the repeated list before handing it to the message; clearing
        // bit 2 makes the next builder mutation re-copy it (copy-on-write).
        if (((bitField0_ & 0x00000004) == 0x00000004)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(
              storeFile_);
          bitField0_ = (bitField0_ & ~0x00000004);
        }
        result.storeFile_ = storeFile_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()) return this;
        if (other.hasFamilyName()) {
          setFamilyName(other.getFamilyName());
        }
        if (other.hasStoreHomeDir()) {
          bitField0_ |= 0x00000002;
          storeHomeDir_ = other.storeHomeDir_;
          onChanged();
        }
        if (!other.storeFile_.isEmpty()) {
          // Share the other message's already-immutable list when ours is
          // empty; otherwise append into our own mutable copy.
          if (storeFile_.isEmpty()) {
            storeFile_ = other.storeFile_;
            bitField0_ = (bitField0_ & ~0x00000004);
          } else {
            ensureStoreFileIsMutable();
            storeFile_.addAll(other.storeFile_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unlike the message's isInitialized(), this is recomputed on each call.
      public final boolean isInitialized() {
        if (!hasFamilyName()) {
          
          return false;
        }
        if (!hasStoreHomeDir()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover the partial message so the finally block can still merge it.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bytes family_name = 1;
      private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family_name = 1;</code>
       */
      public boolean hasFamilyName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes family_name = 1;</code>
       */
      public com.google.protobuf.ByteString getFamilyName() {
        return familyName_;
      }
      /**
       * <code>required bytes family_name = 1;</code>
       */
      public Builder setFamilyName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        familyName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes family_name = 1;</code>
       */
      public Builder clearFamilyName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        familyName_ = getDefaultInstance().getFamilyName();
        onChanged();
        return this;
      }

      // required string store_home_dir = 2;
      private java.lang.Object storeHomeDir_ = "";
      /**
       * <code>required string store_home_dir = 2;</code>
       *
       * <pre>
       *relative to region dir
       * </pre>
       */
      public boolean hasStoreHomeDir() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required string store_home_dir = 2;</code>
       *
       * <pre>
       *relative to region dir
       * </pre>
       */
      public java.lang.String getStoreHomeDir() {
        java.lang.Object ref = storeHomeDir_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          storeHomeDir_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string store_home_dir = 2;</code>
       *
       * <pre>
       *relative to region dir
       * </pre>
       */
      public com.google.protobuf.ByteString
          getStoreHomeDirBytes() {
        java.lang.Object ref = storeHomeDir_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          storeHomeDir_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string store_home_dir = 2;</code>
       *
       * <pre>
       *relative to region dir
       * </pre>
       */
      public Builder setStoreHomeDir(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string store_home_dir = 2;</code>
       *
       * <pre>
       *relative to region dir
       * </pre>
       */
      public Builder clearStoreHomeDir() {
        bitField0_ = (bitField0_ & ~0x00000002);
        storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
        onChanged();
        return this;
      }
      /**
       * <code>required string store_home_dir = 2;</code>
       *
       * <pre>
       *relative to region dir
       * </pre>
       */
      public Builder setStoreHomeDirBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        storeHomeDir_ = value;
        onChanged();
        return this;
      }

      // repeated string store_file = 3;
      private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: clone the list into a mutable one the first time it is
      // modified after being shared or built (bit 2 of bitField0_ = "mutable").
      private void ensureStoreFileIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
          bitField0_ |= 0x00000004;
         }
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public java.util.List<java.lang.String>
          getStoreFileList() {
        return java.util.Collections.unmodifiableList(storeFile_);
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public int getStoreFileCount() {
        return storeFile_.size();
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public java.lang.String getStoreFile(int index) {
        return storeFile_.get(index);
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public com.google.protobuf.ByteString
          getStoreFileBytes(int index) {
        return storeFile_.getByteString(index);
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder setStoreFile(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder addStoreFile(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder addAllStoreFile(
          java.lang.Iterable<java.lang.String> values) {
        ensureStoreFileIsMutable();
        super.addAll(values, storeFile_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder clearStoreFile() {
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 3;</code>
       *
       * <pre>
       * relative to store dir
       * </pre>
       */
      public Builder addStoreFileBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.StoreDescriptor)
    }
8665 
    static {
      // Eagerly build the singleton default instance with default field values.
      defaultInstance = new StoreDescriptor(true);
      defaultInstance.initFields();
    }
8670 
8671     // @@protoc_insertion_point(class_scope:hbase.pb.StoreDescriptor)
8672   }
8673 
  /**
   * Read-only accessor interface implemented by both the
   * {@code hbase.pb.BulkLoadDescriptor} message and its builder.
   */
  public interface BulkLoadDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.TableName table_name = 1;
    /**
     * <code>required .hbase.pb.TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .hbase.pb.TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .hbase.pb.TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // required bytes encoded_region_name = 2;
    /**
     * <code>required bytes encoded_region_name = 2;</code>
     */
    boolean hasEncodedRegionName();
    /**
     * <code>required bytes encoded_region_name = 2;</code>
     */
    com.google.protobuf.ByteString getEncodedRegionName();

    // repeated .hbase.pb.StoreDescriptor stores = 3;
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> 
        getStoresList();
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index);
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    int getStoresCount();
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
        getStoresOrBuilderList();
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index);

    // required int64 bulkload_seq_num = 4;
    /**
     * <code>required int64 bulkload_seq_num = 4;</code>
     */
    boolean hasBulkloadSeqNum();
    /**
     * <code>required int64 bulkload_seq_num = 4;</code>
     */
    long getBulkloadSeqNum();
  }
8736   /**
8737    * Protobuf type {@code hbase.pb.BulkLoadDescriptor}
8738    *
8739    * <pre>
8740    **
8741    * Special WAL entry used for writing bulk load events to WAL
8742    * </pre>
8743    */
8744   public static final class BulkLoadDescriptor extends
8745       com.google.protobuf.GeneratedMessage
8746       implements BulkLoadDescriptorOrBuilder {
    // Use BulkLoadDescriptor.newBuilder() to construct.
    private BulkLoadDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the singleton default instance.
    private BulkLoadDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton with all fields at their defaults; assigned in the static initializer.
    private static final BulkLoadDescriptor defaultInstance;
    public static BulkLoadDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public BulkLoadDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen during parsing that are not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a serialized BulkLoadDescriptor from the stream; invoked by PARSER.
    private BulkLoadDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // Unrecognized tag: preserve it in unknownFields, or stop when
              // parseUnknownField reports an end-group tag.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // table_name (field 1, message): merge with any value seen
              // earlier in the stream rather than overwriting it.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // encoded_region_name (field 2, bytes).
              bitField0_ |= 0x00000002;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 26: {
              // stores (field 3, repeated message): list is allocated lazily
              // on the first occurrence; mutability tracked in mutable_bitField0_.
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>();
                mutable_bitField0_ |= 0x00000004;
              }
              stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry));
              break;
            }
            case 32: {
              // bulkload_seq_num (field 4, int64).
              bitField0_ |= 0x00000004;
              bulkloadSeqNum_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always seal the repeated list and unknown fields, even on failure,
        // so a partially-built message attached to the exception is safe to use.
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          stores_ = java.util.Collections.unmodifiableList(stores_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the protobuf descriptor for the BulkLoadDescriptor message type,
    // resolved from the file-level descriptor table in the enclosing WALProtos class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor;
    }

    // Supplies the reflection-based field accessor table used by the
    // GeneratedMessage superclass; lazily binds it to this message/builder pair.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class);
    }
8849 
    // Stateless parser singleton: each parsePartialFrom call delegates to the
    // stream-reading constructor, which may produce a partially-initialized
    // message (required-field checks happen later in isInitialized()).
    public static com.google.protobuf.Parser<BulkLoadDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<BulkLoadDescriptor>() {
      public BulkLoadDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new BulkLoadDescriptor(input, extensionRegistry);
      }
    };

    // Exposes the shared PARSER instance through the Message interface.
    @java.lang.Override
    public com.google.protobuf.Parser<BulkLoadDescriptor> getParserForType() {
      return PARSER;
    }
8864 
    // Presence bitmap for this message's singular fields:
    // 0x1 = table_name, 0x2 = encoded_region_name, 0x4 = bulkload_seq_num
    // (the repeated "stores" field has no presence bit on the message).
    private int bitField0_;
    // required .hbase.pb.TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .hbase.pb.TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      // Never null on a built message: initFields() seeds the default instance.
      return tableName_;
    }
    /**
     * <code>required .hbase.pb.TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }
8887 
8888     // required bytes encoded_region_name = 2;
8889     public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2;
8890     private com.google.protobuf.ByteString encodedRegionName_;
8891     /**
8892      * <code>required bytes encoded_region_name = 2;</code>
8893      */
8894     public boolean hasEncodedRegionName() {
8895       return ((bitField0_ & 0x00000002) == 0x00000002);
8896     }
8897     /**
8898      * <code>required bytes encoded_region_name = 2;</code>
8899      */
8900     public com.google.protobuf.ByteString getEncodedRegionName() {
8901       return encodedRegionName_;
8902     }
8903 
    // repeated .hbase.pb.StoreDescriptor stores = 3;
    public static final int STORES_FIELD_NUMBER = 3;
    // Made unmodifiable by the parse constructor / buildPartial(), so it is
    // safe to hand out directly from the getters below.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_;
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
      return stores_;
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
        getStoresOrBuilderList() {
      return stores_;
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    public int getStoresCount() {
      return stores_.size();
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
      return stores_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index) {
      return stores_.get(index);
    }
8939 
8940     // required int64 bulkload_seq_num = 4;
8941     public static final int BULKLOAD_SEQ_NUM_FIELD_NUMBER = 4;
8942     private long bulkloadSeqNum_;
8943     /**
8944      * <code>required int64 bulkload_seq_num = 4;</code>
8945      */
8946     public boolean hasBulkloadSeqNum() {
8947       return ((bitField0_ & 0x00000004) == 0x00000004);
8948     }
8949     /**
8950      * <code>required int64 bulkload_seq_num = 4;</code>
8951      */
8952     public long getBulkloadSeqNum() {
8953       return bulkloadSeqNum_;
8954     }
8955 
    // Seeds every field with its protobuf default so getters never return null
    // even when the field's presence bit is unset.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      stores_ = java.util.Collections.emptyList();
      bulkloadSeqNum_ = 0L;
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * Checks that all required fields (table_name, encoded_region_name,
     * bulkload_seq_num) are set and that every nested message is itself
     * initialized. The answer is cached in memoizedIsInitialized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBulkloadSeqNum()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // Required sub-message must also satisfy its own required fields.
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // Repeated stores entries are optional in count but each must be valid.
      for (int i = 0; i < getStoresCount(); i++) {
        if (!getStores(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
8992 
    /**
     * Serializes set fields (in field-number order) followed by any unknown
     * fields. getSerializedSize() is invoked first to populate size memoization
     * used by nested-message length prefixes.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, encodedRegionName_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        output.writeMessage(3, stores_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt64(4, bulkloadSeqNum_);
      }
      getUnknownFields().writeTo(output);
    }
9010 
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and caches the serialized byte size of this message,
     * mirroring the exact fields written by writeTo().
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, encodedRegionName_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, stores_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(4, bulkloadSeqNum_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
9037 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the GeneratedMessage
    // replacement object rather than serializing this instance directly.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
9044 
    /**
     * Field-by-field equality: presence flags must match, set fields must be
     * equal, the repeated stores lists must be equal, and unknown fields
     * must match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
      if (hasEncodedRegionName()) {
        result = result && getEncodedRegionName()
            .equals(other.getEncodedRegionName());
      }
      result = result && getStoresList()
          .equals(other.getStoresList());
      result = result && (hasBulkloadSeqNum() == other.hasBulkloadSeqNum());
      if (hasBulkloadSeqNum()) {
        result = result && (getBulkloadSeqNum()
            == other.getBulkloadSeqNum());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
9077 
    // Cached hash; 0 means not yet computed (0 hashes are recomputed each call).
    private int memoizedHashCode = 0;
    /**
     * Hash over the descriptor, every set field (tagged with its field number),
     * and the unknown-field set; memoized after the first computation.
     * Consistent with equals(): only set fields contribute.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasEncodedRegionName()) {
        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getEncodedRegionName().hashCode();
      }
      if (getStoresCount() > 0) {
        hash = (37 * hash) + STORES_FIELD_NUMBER;
        hash = (53 * hash) + getStoresList().hashCode();
      }
      if (hasBulkloadSeqNum()) {
        hash = (37 * hash) + BULKLOAD_SEQ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBulkloadSeqNum());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
9106 
    // ---------------------------------------------------------------------
    // Static parse entry points: thin wrappers delegating to PARSER for each
    // supported input form (ByteString, byte[], InputStream, CodedInputStream),
    // with and without an extension registry, plus length-delimited variants.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
9159 
    // Builder factories: fresh builder, builder pre-populated from a prototype,
    // and a builder seeded from this instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent for change
    // notification (used by nested-builder plumbing).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
9173     /**
9174      * Protobuf type {@code hbase.pb.BulkLoadDescriptor}
9175      *
9176      * <pre>
9177      **
9178      * Special WAL entry used for writing bulk load events to WAL
9179      * </pre>
9180      */
9181     public static final class Builder extends
9182         com.google.protobuf.GeneratedMessage.Builder<Builder>
9183        implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder {
      // Same descriptor as the message type this builder produces.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor;
      }

      // Reflection accessor table shared with BulkLoadDescriptor.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class);
      }
9195 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-attached variant used by nested-builder plumbing so changes
      // propagate upward.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the sub-field builders when the runtime is configured
      // to always use field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
          getStoresFieldBuilder();
        }
      }
      // Factory used by BulkLoadDescriptor.newBuilder().
      private static Builder create() {
        return new Builder();
      }
9215 
      /**
       * Resets every field to its default and clears all presence bits.
       * Note the builder's bit layout differs from the message's: here
       * 0x1 = table_name, 0x2 = encoded_region_name, 0x4 = stores (mutability
       * flag for the list), 0x8 = bulkload_seq_num.
       */
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (storesBuilder_ == null) {
          stores_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          storesBuilder_.clear();
        }
        bulkloadSeqNum_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
9236 
      // Deep copy via round-trip through a partially-built message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException if any
       * required field is missing; use buildPartial() to skip that check.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
9257 
      /**
       * Builds the message without enforcing required fields. Translates the
       * builder's presence bits to the message's layout: builder bit 0x8
       * (bulkload_seq_num) becomes message bit 0x4, because the repeated
       * stores field (builder bit 0x4) carries no presence bit on the message.
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (storesBuilder_ == null) {
          // Freeze the list so the built message can share it safely; the
          // builder's mutability bit is cleared since the list is now shared.
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            stores_ = java.util.Collections.unmodifiableList(stores_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.stores_ = stores_;
        } else {
          result.stores_ = storesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000004;
        }
        result.bulkloadSeqNum_ = bulkloadSeqNum_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
9291 
      // Generic merge: dispatches to the typed overload when possible,
      // otherwise falls back to reflection-based merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges set fields from {@code other} into this builder. Singular fields
       * overwrite/merge; the repeated stores list is appended. When this
       * builder's list is empty it aliases other's (immutable) list directly
       * and clears the mutability bit to avoid a copy.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (storesBuilder_ == null) {
          if (!other.stores_.isEmpty()) {
            if (stores_.isEmpty()) {
              stores_ = other.stores_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureStoresIsMutable();
              stores_.addAll(other.stores_);
            }
            onChanged();
          }
        } else {
          if (!other.stores_.isEmpty()) {
            if (storesBuilder_.isEmpty()) {
              // Drop the empty repeated-field builder and alias other's list;
              // recreate the builder only if the runtime forces field builders.
              storesBuilder_.dispose();
              storesBuilder_ = null;
              stores_ = other.stores_;
              bitField0_ = (bitField0_ & ~0x00000004);
              storesBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getStoresFieldBuilder() : null;
            } else {
              storesBuilder_.addAllMessages(other.stores_);
            }
          }
        }
        if (other.hasBulkloadSeqNum()) {
          setBulkloadSeqNum(other.getBulkloadSeqNum());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
9341 
      /**
       * Mirrors BulkLoadDescriptor.isInitialized(): all three required fields
       * must be set and every nested message must itself be initialized.
       * Unlike the message version, the result is not memoized.
       */
      public final boolean isInitialized() {
        if (!hasTableName()) {
          
          return false;
        }
        if (!hasEncodedRegionName()) {
          
          return false;
        }
        if (!hasBulkloadSeqNum()) {
          
          return false;
        }
        if (!getTableName().isInitialized()) {
          
          return false;
        }
        for (int i = 0; i < getStoresCount(); i++) {
          if (!getStores(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
9367 
      /**
       * Parses from a stream and merges the result into this builder. On a
       * protocol error, the partially-parsed message (recovered from the
       * exception) is still merged in the finally block before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder presence/mutability bits: 0x1 = table_name,
      // 0x2 = encoded_region_name, 0x4 = stores list is mutable,
      // 0x8 = bulkload_seq_num.
      private int bitField0_;

      // required .hbase.pb.TableName table_name = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      // Lazily-created nested builder; when non-null it owns the field value
      // and tableName_ is ignored (set to null in getTableNameFieldBuilder()).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
9407       /**
9408        * <code>required .hbase.pb.TableName table_name = 1;</code>
9409        */
9410       public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
9411         if (tableNameBuilder_ == null) {
9412           if (value == null) {
9413             throw new NullPointerException();
9414           }
9415           tableName_ = value;
9416           onChanged();
9417         } else {
9418           tableNameBuilder_.setMessage(value);
9419         }
9420         bitField0_ |= 0x00000001;
9421         return this;
9422       }
9423       /**
9424        * <code>required .hbase.pb.TableName table_name = 1;</code>
9425        */
9426       public Builder setTableName(
9427           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
9428         if (tableNameBuilder_ == null) {
9429           tableName_ = builderForValue.build();
9430           onChanged();
9431         } else {
9432           tableNameBuilder_.setMessage(builderForValue.build());
9433         }
9434         bitField0_ |= 0x00000001;
9435         return this;
9436       }
9437       /**
9438        * <code>required .hbase.pb.TableName table_name = 1;</code>
9439        */
9440       public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
9441         if (tableNameBuilder_ == null) {
9442           if (((bitField0_ & 0x00000001) == 0x00000001) &&
9443               tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
9444             tableName_ =
9445               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
9446           } else {
9447             tableName_ = value;
9448           }
9449           onChanged();
9450         } else {
9451           tableNameBuilder_.mergeFrom(value);
9452         }
9453         bitField0_ |= 0x00000001;
9454         return this;
9455       }
9456       /**
9457        * <code>required .hbase.pb.TableName table_name = 1;</code>
9458        */
9459       public Builder clearTableName() {
9460         if (tableNameBuilder_ == null) {
9461           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
9462           onChanged();
9463         } else {
9464           tableNameBuilder_.clear();
9465         }
9466         bitField0_ = (bitField0_ & ~0x00000001);
9467         return this;
9468       }
9469       /**
9470        * <code>required .hbase.pb.TableName table_name = 1;</code>
9471        */
9472       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
9473         bitField0_ |= 0x00000001;
9474         onChanged();
9475         return getTableNameFieldBuilder().getBuilder();
9476       }
9477       /**
9478        * <code>required .hbase.pb.TableName table_name = 1;</code>
9479        */
9480       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
9481         if (tableNameBuilder_ != null) {
9482           return tableNameBuilder_.getMessageOrBuilder();
9483         } else {
9484           return tableName_;
9485         }
9486       }
9487       /**
9488        * <code>required .hbase.pb.TableName table_name = 1;</code>
9489        */
9490       private com.google.protobuf.SingleFieldBuilder<
9491           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
9492           getTableNameFieldBuilder() {
9493         if (tableNameBuilder_ == null) {
9494           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
9495               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
9496                   tableName_,
9497                   getParentForChildren(),
9498                   isClean());
9499           tableName_ = null;
9500         }
9501         return tableNameBuilder_;
9502       }
9503 
9504       // required bytes encoded_region_name = 2;
9505       private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
9506       /**
9507        * <code>required bytes encoded_region_name = 2;</code>
9508        */
9509       public boolean hasEncodedRegionName() {
9510         return ((bitField0_ & 0x00000002) == 0x00000002);
9511       }
9512       /**
9513        * <code>required bytes encoded_region_name = 2;</code>
9514        */
9515       public com.google.protobuf.ByteString getEncodedRegionName() {
9516         return encodedRegionName_;
9517       }
9518       /**
9519        * <code>required bytes encoded_region_name = 2;</code>
9520        */
9521       public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
9522         if (value == null) {
9523     throw new NullPointerException();
9524   }
9525   bitField0_ |= 0x00000002;
9526         encodedRegionName_ = value;
9527         onChanged();
9528         return this;
9529       }
9530       /**
9531        * <code>required bytes encoded_region_name = 2;</code>
9532        */
9533       public Builder clearEncodedRegionName() {
9534         bitField0_ = (bitField0_ & ~0x00000002);
9535         encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
9536         onChanged();
9537         return this;
9538       }
9539 
      // repeated .hbase.pb.StoreDescriptor stores = 3;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: bit 0x4 tracks whether stores_ is a private
      // mutable ArrayList; if not, copy before mutating (the list may alias an
      // immutable list from a merged message).
      private void ensureStoresIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(stores_);
          bitField0_ |= 0x00000004;
         }
      }

      // Lazily-created repeated-field builder; when non-null it owns the list
      // and stores_ is ignored.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_;

      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
        if (storesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(stores_);
        } else {
          return storesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
       */
      public int getStoresCount() {
        if (storesBuilder_ == null) {
          return stores_.size();
        } else {
          return storesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
        if (storesBuilder_ == null) {
          return stores_.get(index);
        } else {
          return storesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
       *
       * Replaces the element at {@code index}; rejects null.
       */
      public Builder setStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
        if (storesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoresIsMutable();
          stores_.set(index, value);
          onChanged();
        } else {
          storesBuilder_.setMessage(index, value);
        }
        return this;
      }
9600       /**
9601        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9602        */
9603       public Builder setStores(
9604           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
9605         if (storesBuilder_ == null) {
9606           ensureStoresIsMutable();
9607           stores_.set(index, builderForValue.build());
9608           onChanged();
9609         } else {
9610           storesBuilder_.setMessage(index, builderForValue.build());
9611         }
9612         return this;
9613       }
9614       /**
9615        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9616        */
9617       public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
9618         if (storesBuilder_ == null) {
9619           if (value == null) {
9620             throw new NullPointerException();
9621           }
9622           ensureStoresIsMutable();
9623           stores_.add(value);
9624           onChanged();
9625         } else {
9626           storesBuilder_.addMessage(value);
9627         }
9628         return this;
9629       }
9630       /**
9631        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9632        */
9633       public Builder addStores(
9634           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
9635         if (storesBuilder_ == null) {
9636           if (value == null) {
9637             throw new NullPointerException();
9638           }
9639           ensureStoresIsMutable();
9640           stores_.add(index, value);
9641           onChanged();
9642         } else {
9643           storesBuilder_.addMessage(index, value);
9644         }
9645         return this;
9646       }
9647       /**
9648        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9649        */
9650       public Builder addStores(
9651           org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
9652         if (storesBuilder_ == null) {
9653           ensureStoresIsMutable();
9654           stores_.add(builderForValue.build());
9655           onChanged();
9656         } else {
9657           storesBuilder_.addMessage(builderForValue.build());
9658         }
9659         return this;
9660       }
9661       /**
9662        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9663        */
9664       public Builder addStores(
9665           int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
9666         if (storesBuilder_ == null) {
9667           ensureStoresIsMutable();
9668           stores_.add(index, builderForValue.build());
9669           onChanged();
9670         } else {
9671           storesBuilder_.addMessage(index, builderForValue.build());
9672         }
9673         return this;
9674       }
9675       /**
9676        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9677        */
9678       public Builder addAllStores(
9679           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> values) {
9680         if (storesBuilder_ == null) {
9681           ensureStoresIsMutable();
9682           super.addAll(values, stores_);
9683           onChanged();
9684         } else {
9685           storesBuilder_.addAllMessages(values);
9686         }
9687         return this;
9688       }
9689       /**
9690        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9691        */
9692       public Builder clearStores() {
9693         if (storesBuilder_ == null) {
9694           stores_ = java.util.Collections.emptyList();
9695           bitField0_ = (bitField0_ & ~0x00000004);
9696           onChanged();
9697         } else {
9698           storesBuilder_.clear();
9699         }
9700         return this;
9701       }
9702       /**
9703        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9704        */
9705       public Builder removeStores(int index) {
9706         if (storesBuilder_ == null) {
9707           ensureStoresIsMutable();
9708           stores_.remove(index);
9709           onChanged();
9710         } else {
9711           storesBuilder_.remove(index);
9712         }
9713         return this;
9714       }
9715       /**
9716        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9717        */
9718       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder(
9719           int index) {
9720         return getStoresFieldBuilder().getBuilder(index);
9721       }
9722       /**
9723        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9724        */
9725       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
9726           int index) {
9727         if (storesBuilder_ == null) {
9728           return stores_.get(index);  } else {
9729           return storesBuilder_.getMessageOrBuilder(index);
9730         }
9731       }
9732       /**
9733        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9734        */
9735       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
9736            getStoresOrBuilderList() {
9737         if (storesBuilder_ != null) {
9738           return storesBuilder_.getMessageOrBuilderList();
9739         } else {
9740           return java.util.Collections.unmodifiableList(stores_);
9741         }
9742       }
9743       /**
9744        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9745        */
9746       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() {
9747         return getStoresFieldBuilder().addBuilder(
9748             org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
9749       }
9750       /**
9751        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9752        */
9753       public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder(
9754           int index) {
9755         return getStoresFieldBuilder().addBuilder(
9756             index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
9757       }
9758       /**
9759        * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code>
9760        */
9761       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder> 
9762            getStoresBuilderList() {
9763         return getStoresFieldBuilder().getBuilderList();
9764       }
9765       private com.google.protobuf.RepeatedFieldBuilder<
9766           org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
9767           getStoresFieldBuilder() {
9768         if (storesBuilder_ == null) {
9769           storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
9770               org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>(
9771                   stores_,
9772                   ((bitField0_ & 0x00000004) == 0x00000004),
9773                   getParentForChildren(),
9774                   isClean());
9775           stores_ = null;
9776         }
9777         return storesBuilder_;
9778       }
9779 
      // required int64 bulkload_seq_num = 4;
      // Presence tracked via bit 0x00000008 of bitField0_.
      private long bulkloadSeqNum_ ;
      /**
       * <code>required int64 bulkload_seq_num = 4;</code>
       */
      public boolean hasBulkloadSeqNum() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>required int64 bulkload_seq_num = 4;</code>
       */
      public long getBulkloadSeqNum() {
        return bulkloadSeqNum_;
      }
      /**
       * <code>required int64 bulkload_seq_num = 4;</code>
       */
      public Builder setBulkloadSeqNum(long value) {
        bitField0_ |= 0x00000008;
        bulkloadSeqNum_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int64 bulkload_seq_num = 4;</code>
       *
       * <p>Clears the has-bit and resets the value to the proto default (0).</p>
       */
      public Builder clearBulkloadSeqNum() {
        bitField0_ = (bitField0_ & ~0x00000008);
        bulkloadSeqNum_ = 0L;
        onChanged();
        return this;
      }
9812 
9813       // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadDescriptor)
9814     }
9815 
    // Eagerly builds the singleton default instance returned by
    // getDefaultInstance(), with all fields set to their proto defaults.
    static {
      defaultInstance = new BulkLoadDescriptor(true);
      defaultInstance.initFields();
    }
9820 
9821     // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadDescriptor)
9822   }
9823 
  /**
   * Read-side accessors shared by {@code RegionEventDescriptor} and its
   * {@code Builder}: has/get methods for each proto field declared below.
   */
  public interface RegionEventDescriptorOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;
    /**
     * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
     */
    boolean hasEventType();
    /**
     * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType();

    // required bytes table_name = 2;
    /**
     * <code>required bytes table_name = 2;</code>
     */
    boolean hasTableName();
    /**
     * <code>required bytes table_name = 2;</code>
     */
    com.google.protobuf.ByteString getTableName();

    // required bytes encoded_region_name = 3;
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    boolean hasEncodedRegionName();
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    com.google.protobuf.ByteString getEncodedRegionName();

    // optional uint64 log_sequence_number = 4;
    /**
     * <code>optional uint64 log_sequence_number = 4;</code>
     */
    boolean hasLogSequenceNumber();
    /**
     * <code>optional uint64 log_sequence_number = 4;</code>
     */
    long getLogSequenceNumber();

    // repeated .hbase.pb.StoreDescriptor stores = 5;
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> 
        getStoresList();
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index);
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    int getStoresCount();
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
        getStoresOrBuilderList();
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index);

    // optional .hbase.pb.ServerName server = 6;
    /**
     * <code>optional .hbase.pb.ServerName server = 6;</code>
     *
     * <pre>
     * Server who opened the region
     * </pre>
     */
    boolean hasServer();
    /**
     * <code>optional .hbase.pb.ServerName server = 6;</code>
     *
     * <pre>
     * Server who opened the region
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
    /**
     * <code>optional .hbase.pb.ServerName server = 6;</code>
     *
     * <pre>
     * Server who opened the region
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    // optional bytes region_name = 7;
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    boolean hasRegionName();
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    com.google.protobuf.ByteString getRegionName();
  }
9936   /**
9937    * Protobuf type {@code hbase.pb.RegionEventDescriptor}
9938    *
9939    * <pre>
9940    **
9941    * Special WAL entry to hold all related to a region event (open/close).
9942    * </pre>
9943    */
9944   public static final class RegionEventDescriptor extends
9945       com.google.protobuf.GeneratedMessage
9946       implements RegionEventDescriptorOrBuilder {
9947     // Use RegionEventDescriptor.newBuilder() to construct.
    // Builder-based constructor: carries over unknown fields collected by the
    // builder.
    private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the static default instance below.
    private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final RegionEventDescriptor defaultInstance;
    public static RegionEventDescriptor getDefaultInstance() {
      return defaultInstance;
    }

    public RegionEventDescriptor getDefaultInstanceForType() {
      return defaultInstance;
    }
9962 
    // Fields present on the wire but not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Dispatches on the tag
    // ((field_number << 3) | wire_type): tag 0 marks end of stream; the
    // default branch (placed before the numbered cases — valid Java, standard
    // protoc layout) preserves unrecognized fields; each numbered case decodes
    // one declared field.
    private RegionEventDescriptor(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Unrecognized enum numbers are kept as unknown fields rather
              // than dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                eventType_ = value;
              }
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              logSequenceNumber_ = input.readUInt64();
              break;
            }
            case 42: {
              // Repeated field: the list is created lazily on first element;
              // mutable_bitField0_ remembers that it needs sealing in finally.
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>();
                mutable_bitField0_ |= 0x00000010;
              }
              stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry));
              break;
            }
            case 50: {
              // Singular message field: if already present, merge the new
              // occurrence into the existing value (last-one-wins semantics).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = server_.toBuilder();
              }
              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(server_);
                server_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 58: {
              bitField0_ |= 0x00000020;
              regionName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on parse failure: seal the repeated list and attach
        // whatever unknown fields were collected so far.
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          stores_ = java.util.Collections.unmodifiableList(stores_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: the message descriptor and the accessor table used
    // by GeneratedMessage's reflective field access.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class);
    }
10070 
    // Parser singleton delegating to the wire-format constructor above.
    // NOTE(review): non-final public static field is how protobuf 2.5
    // generated this; later protoc versions make it final.
    public static com.google.protobuf.Parser<RegionEventDescriptor> PARSER =
        new com.google.protobuf.AbstractParser<RegionEventDescriptor>() {
      public RegionEventDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionEventDescriptor(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionEventDescriptor> getParserForType() {
      return PARSER;
    }
10085 
10086     /**
10087      * Protobuf enum {@code hbase.pb.RegionEventDescriptor.EventType}
10088      */
10089     public enum EventType
10090         implements com.google.protobuf.ProtocolMessageEnum {
10091       /**
10092        * <code>REGION_OPEN = 0;</code>
10093        */
10094       REGION_OPEN(0, 0),
10095       /**
10096        * <code>REGION_CLOSE = 1;</code>
10097        */
10098       REGION_CLOSE(1, 1),
10099       ;
10100 
10101       /**
10102        * <code>REGION_OPEN = 0;</code>
10103        */
10104       public static final int REGION_OPEN_VALUE = 0;
10105       /**
10106        * <code>REGION_CLOSE = 1;</code>
10107        */
10108       public static final int REGION_CLOSE_VALUE = 1;
10109 
10110 
10111       public final int getNumber() { return value; }
10112 
10113       public static EventType valueOf(int value) {
10114         switch (value) {
10115           case 0: return REGION_OPEN;
10116           case 1: return REGION_CLOSE;
10117           default: return null;
10118         }
10119       }
10120 
10121       public static com.google.protobuf.Internal.EnumLiteMap<EventType>
10122           internalGetValueMap() {
10123         return internalValueMap;
10124       }
10125       private static com.google.protobuf.Internal.EnumLiteMap<EventType>
10126           internalValueMap =
10127             new com.google.protobuf.Internal.EnumLiteMap<EventType>() {
10128               public EventType findValueByNumber(int number) {
10129                 return EventType.valueOf(number);
10130               }
10131             };
10132 
10133       public final com.google.protobuf.Descriptors.EnumValueDescriptor
10134           getValueDescriptor() {
10135         return getDescriptor().getValues().get(index);
10136       }
10137       public final com.google.protobuf.Descriptors.EnumDescriptor
10138           getDescriptorForType() {
10139         return getDescriptor();
10140       }
10141       public static final com.google.protobuf.Descriptors.EnumDescriptor
10142           getDescriptor() {
10143         return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0);
10144       }
10145 
10146       private static final EventType[] VALUES = values();
10147 
10148       public static EventType valueOf(
10149           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
10150         if (desc.getType() != getDescriptor()) {
10151           throw new java.lang.IllegalArgumentException(
10152             "EnumValueDescriptor is not for this type.");
10153         }
10154         return VALUES[desc.getIndex()];
10155       }
10156 
10157       private final int index;
10158       private final int value;
10159 
10160       private EventType(int index, int value) {
10161         this.index = index;
10162         this.value = value;
10163       }
10164 
10165       // @@protoc_insertion_point(enum_scope:hbase.pb.RegionEventDescriptor.EventType)
10166     }
10167 
    // One bit per optional/required field records presence (set during parse).
    private int bitField0_;
    // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;
    public static final int EVENT_TYPE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_;
    /**
     * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
     */
    public boolean hasEventType() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() {
      return eventType_;
    }

    // required bytes table_name = 2;
    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString tableName_;
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    // required bytes encoded_region_name = 3;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString encodedRegionName_;
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required bytes encoded_region_name = 3;</code>
     */
    public com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    // optional uint64 log_sequence_number = 4;
    public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4;
    private long logSequenceNumber_;
    /**
     * <code>optional uint64 log_sequence_number = 4;</code>
     */
    public boolean hasLogSequenceNumber() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 log_sequence_number = 4;</code>
     */
    public long getLogSequenceNumber() {
      return logSequenceNumber_;
    }
10232 
    // repeated .hbase.pb.StoreDescriptor stores = 5;
    // On the immutable message the list is already unmodifiable (sealed by the
    // parsing constructor), so it is returned directly.
    public static final int STORES_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_;
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
      return stores_;
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
        getStoresOrBuilderList() {
      return stores_;
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    public int getStoresCount() {
      return stores_.size();
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
      return stores_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
        int index) {
      return stores_.get(index);
    }
10268 
    // optional .hbase.pb.ServerName server = 6;
    // If unset, getServer() returns the ServerName default instance (assigned
    // in initFields()), never null.
    public static final int SERVER_FIELD_NUMBER = 6;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
    /**
     * <code>optional .hbase.pb.ServerName server = 6;</code>
     *
     * <pre>
     * Server who opened the region
     * </pre>
     */
    public boolean hasServer() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional .hbase.pb.ServerName server = 6;</code>
     *
     * <pre>
     * Server who opened the region
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
      return server_;
    }
    /**
     * <code>optional .hbase.pb.ServerName server = 6;</code>
     *
     * <pre>
     * Server who opened the region
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
      return server_;
    }

    // optional bytes region_name = 7;
    public static final int REGION_NAME_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString regionName_;
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    public boolean hasRegionName() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional bytes region_name = 7;</code>
     *
     * <pre>
     * full region name
     * </pre>
     */
    public com.google.protobuf.ByteString getRegionName() {
      return regionName_;
    }
10326 
    // Assigns every field its proto default so getters never return null,
    // even before (or without) parsing.
    private void initFields() {
      eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      logSequenceNumber_ = 0L;
      stores_ = java.util.Collections.emptyList();
      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      regionName_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized result: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    // True iff all required fields (event_type, table_name,
    // encoded_region_name) are set and every nested message is itself
    // initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasEventType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getStoresCount(); i++) {
        if (!getStores(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasServer()) {
        if (!getServer().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
10368 
    // Serializes each present field to the wire in ascending field-number
    // order (1..7), guarded by the corresponding presence bit.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Populates the memoized size caches needed for nested message writes.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, eventType_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, logSequenceNumber_);
      }
      // Repeated field: no presence bit, every element is written.
      for (int i = 0; i < stores_.size(); i++) {
        output.writeMessage(5, stores_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(6, server_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(7, regionName_);
      }
      // Round-trip any fields that were unknown at parse time.
      getUnknownFields().writeTo(output);
    }
10395 
    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Mirrors writeTo(): sums the encoded size of each present field.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, eventType_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, logSequenceNumber_);
      }
      for (int i = 0; i < stores_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, stores_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, server_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, regionName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
10434 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Delegates Java serialization replacement to the GeneratedMessage base.
      return super.writeReplace();
    }
10441 
10442     @java.lang.Override
10443     public boolean equals(final java.lang.Object obj) {
10444       if (obj == this) {
10445        return true;
10446       }
10447       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)) {
10448         return super.equals(obj);
10449       }
10450       org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) obj;
10451 
10452       boolean result = true;
10453       result = result && (hasEventType() == other.hasEventType());
10454       if (hasEventType()) {
10455         result = result &&
10456             (getEventType() == other.getEventType());
10457       }
10458       result = result && (hasTableName() == other.hasTableName());
10459       if (hasTableName()) {
10460         result = result && getTableName()
10461             .equals(other.getTableName());
10462       }
10463       result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
10464       if (hasEncodedRegionName()) {
10465         result = result && getEncodedRegionName()
10466             .equals(other.getEncodedRegionName());
10467       }
10468       result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
10469       if (hasLogSequenceNumber()) {
10470         result = result && (getLogSequenceNumber()
10471             == other.getLogSequenceNumber());
10472       }
10473       result = result && getStoresList()
10474           .equals(other.getStoresList());
10475       result = result && (hasServer() == other.hasServer());
10476       if (hasServer()) {
10477         result = result && getServer()
10478             .equals(other.getServer());
10479       }
10480       result = result && (hasRegionName() == other.hasRegionName());
10481       if (hasRegionName()) {
10482         result = result && getRegionName()
10483             .equals(other.getRegionName());
10484       }
10485       result = result &&
10486           getUnknownFields().equals(other.getUnknownFields());
10487       return result;
10488     }
10489 
10490     private int memoizedHashCode = 0;
10491     @java.lang.Override
10492     public int hashCode() {
10493       if (memoizedHashCode != 0) {
10494         return memoizedHashCode;
10495       }
10496       int hash = 41;
10497       hash = (19 * hash) + getDescriptorForType().hashCode();
10498       if (hasEventType()) {
10499         hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER;
10500         hash = (53 * hash) + hashEnum(getEventType());
10501       }
10502       if (hasTableName()) {
10503         hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
10504         hash = (53 * hash) + getTableName().hashCode();
10505       }
10506       if (hasEncodedRegionName()) {
10507         hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
10508         hash = (53 * hash) + getEncodedRegionName().hashCode();
10509       }
10510       if (hasLogSequenceNumber()) {
10511         hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
10512         hash = (53 * hash) + hashLong(getLogSequenceNumber());
10513       }
10514       if (getStoresCount() > 0) {
10515         hash = (37 * hash) + STORES_FIELD_NUMBER;
10516         hash = (53 * hash) + getStoresList().hashCode();
10517       }
10518       if (hasServer()) {
10519         hash = (37 * hash) + SERVER_FIELD_NUMBER;
10520         hash = (53 * hash) + getServer().hashCode();
10521       }
10522       if (hasRegionName()) {
10523         hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
10524         hash = (53 * hash) + getRegionName().hashCode();
10525       }
10526       hash = (29 * hash) + getUnknownFields().hashCode();
10527       memoizedHashCode = hash;
10528       return hash;
10529     }
10530 
    // Static parsing entry points; all delegate to the generated PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10583 
    // Builder factories: fresh, typed, seeded-from-prototype, and to-builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
10590 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parented builder: change notifications propagate to the parent.
      Builder builder = new Builder(parent);
      return builder;
    }
10597     /**
10598      * Protobuf type {@code hbase.pb.RegionEventDescriptor}
10599      *
10600      * <pre>
10601      **
10602      * Special WAL entry to hold all related to a region event (open/close).
10603      * </pre>
10604      */
10605     public static final class Builder extends
10606         com.google.protobuf.GeneratedMessage.Builder<Builder>
10607        implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder {
      // Returns the message descriptor shared with the outer WALProtos class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor;
      }
10612 
      // Reflection support: binds the descriptor to the generated message and
      // builder classes for field access by name/number.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class);
      }
10619 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parented variant used by newBuilderForType(parent).
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Eagerly creates nested field builders only when the runtime
        // requests it (alwaysUseFieldBuilders is a protobuf debug switch).
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStoresFieldBuilder();
          getServerFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
10639 
      // Resets every field to its proto default and clears all presence bits.
      // Builder bits: 0x01 eventType, 0x02 tableName, 0x04 encodedRegionName,
      // 0x08 logSequenceNumber, 0x10 stores, 0x20 server, 0x40 regionName.
      public Builder clear() {
        super.clear();
        eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        logSequenceNumber_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (storesBuilder_ == null) {
          stores_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          storesBuilder_.clear();
        }
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverBuilder_.clear();
        }
        // server's presence bit is cleared regardless of which branch ran.
        bitField0_ = (bitField0_ & ~0x00000020);
        regionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
10666 
      public Builder clone() {
        // Copy via a fresh builder merged with the current (possibly partial) state.
        return create().mergeFrom(buildPartial());
      }
10670 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor;
      }

      // Default instance of the message this builder produces.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance();
      }
10679 
      // Builds and verifies: throws if any required field is missing
      // (see isInitialized()), unlike buildPartial().
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
10687 
      // Copies builder state into a new message without required-field checks.
      // Builder presence bits are remapped to message bits: repeated 'stores'
      // occupies builder bit 0x10 but has no message bit, so server
      // (builder 0x20) maps to message 0x10 and regionName (builder 0x40)
      // maps to message 0x20.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.eventType_ = eventType_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.encodedRegionName_ = encodedRegionName_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.logSequenceNumber_ = logSequenceNumber_;
        if (storesBuilder_ == null) {
          // Freeze the inline list and hand it to the message; the builder's
          // mutable bit is dropped so later edits copy-on-write.
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            stores_ = java.util.Collections.unmodifiableList(stores_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.stores_ = stores_;
        } else {
          result.stores_ = storesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000010;
        }
        if (serverBuilder_ == null) {
          result.server_ = server_;
        } else {
          result.server_ = serverBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000020;
        }
        result.regionName_ = regionName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
10733 
10734       public Builder mergeFrom(com.google.protobuf.Message other) {
10735         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) {
10736           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)other);
10737         } else {
10738           super.mergeFrom(other);
10739           return this;
10740         }
10741       }
10742 
      // Field-by-field merge: set fields of 'other' overwrite singular fields,
      // repeated 'stores' elements are appended, and 'server' is sub-merged.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this;
        if (other.hasEventType()) {
          setEventType(other.getEventType());
        }
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasEncodedRegionName()) {
          setEncodedRegionName(other.getEncodedRegionName());
        }
        if (other.hasLogSequenceNumber()) {
          setLogSequenceNumber(other.getLogSequenceNumber());
        }
        if (storesBuilder_ == null) {
          if (!other.stores_.isEmpty()) {
            if (stores_.isEmpty()) {
              // Share the other message's (immutable) list; the cleared
              // mutable bit forces a copy-on-write before any local edit.
              stores_ = other.stores_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureStoresIsMutable();
              stores_.addAll(other.stores_);
            }
            onChanged();
          }
        } else {
          if (!other.stores_.isEmpty()) {
            if (storesBuilder_.isEmpty()) {
              // Empty field builder: discard it and adopt the shared list,
              // recreating the builder only in always-use-field-builders mode.
              storesBuilder_.dispose();
              storesBuilder_ = null;
              stores_ = other.stores_;
              bitField0_ = (bitField0_ & ~0x00000010);
              storesBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getStoresFieldBuilder() : null;
            } else {
              storesBuilder_.addAllMessages(other.stores_);
            }
          }
        }
        if (other.hasServer()) {
          mergeServer(other.getServer());
        }
        if (other.hasRegionName()) {
          setRegionName(other.getRegionName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
10792 
10793       public final boolean isInitialized() {
10794         if (!hasEventType()) {
10795           
10796           return false;
10797         }
10798         if (!hasTableName()) {
10799           
10800           return false;
10801         }
10802         if (!hasEncodedRegionName()) {
10803           
10804           return false;
10805         }
10806         for (int i = 0; i < getStoresCount(); i++) {
10807           if (!getStores(i).isInitialized()) {
10808             
10809             return false;
10810           }
10811         }
10812         if (hasServer()) {
10813           if (!getServer().isInitialized()) {
10814             
10815             return false;
10816           }
10817         }
10818         return true;
10819       }
10820 
      // Parses from a stream and merges the result into this builder.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover the partially-built message so the finally block can
          // still merge whatever was read before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the builder's fields (see clear() for the layout).
      private int bitField0_;
10839 
      // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;
      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
      /**
       * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
       */
      public boolean hasEventType() {
        // Presence bit 0x01 of the builder's bitField0_.
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() {
        return eventType_;
      }
      /**
       * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
       */
      public Builder setEventType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) {
        // Null is rejected; protobuf fields are never null.
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        eventType_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code>
       */
      public Builder clearEventType() {
        // Drops the presence bit and restores the proto default (REGION_OPEN).
        bitField0_ = (bitField0_ & ~0x00000001);
        eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
        onChanged();
        return this;
      }
10875 
      // required bytes table_name = 2;
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public boolean hasTableName() {
        // Presence bit 0x02 of the builder's bitField0_.
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        tableName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes table_name = 2;</code>
       */
      public Builder clearTableName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        // Restores the message's default value (ByteString.EMPTY).
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }
10911 
      // required bytes encoded_region_name = 3;
      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public boolean hasEncodedRegionName() {
        // Presence bit 0x04 of the builder's bitField0_.
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public com.google.protobuf.ByteString getEncodedRegionName() {
        return encodedRegionName_;
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        encodedRegionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes encoded_region_name = 3;</code>
       */
      public Builder clearEncodedRegionName() {
        bitField0_ = (bitField0_ & ~0x00000004);
        // Restores the message's default value (ByteString.EMPTY).
        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
        onChanged();
        return this;
      }
10947 
      // optional uint64 log_sequence_number = 4;
      private long logSequenceNumber_ ;
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public boolean hasLogSequenceNumber() {
        // Presence bit 0x08 of the builder's bitField0_.
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public long getLogSequenceNumber() {
        return logSequenceNumber_;
      }
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public Builder setLogSequenceNumber(long value) {
        bitField0_ |= 0x00000008;
        logSequenceNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 log_sequence_number = 4;</code>
       */
      public Builder clearLogSequenceNumber() {
        bitField0_ = (bitField0_ & ~0x00000008);
        logSequenceNumber_ = 0L;
        onChanged();
        return this;
      }
10980 
      // repeated .hbase.pb.StoreDescriptor stores = 5;
      // Copy-on-write list: starts as a shared/immutable list and is copied
      // into a private ArrayList the first time a mutation is requested.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_ =
        java.util.Collections.emptyList();
      private void ensureStoresIsMutable() {
        // Bit 0x10 set means stores_ is already a private, mutable copy.
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(stores_);
          bitField0_ |= 0x00000010;
         }
      }

      // Lazily-created field builder; when non-null it, not stores_, owns the data.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_;
10993 
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() {
        // Inline list is exposed read-only; the field builder manages its own view.
        if (storesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(stores_);
        } else {
          return storesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public int getStoresCount() {
        if (storesBuilder_ == null) {
          return stores_.size();
        } else {
          return storesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) {
        if (storesBuilder_ == null) {
          return stores_.get(index);
        } else {
          return storesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder setStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
        // Writes go through the inline list (copy-on-write) or the field builder.
        if (storesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoresIsMutable();
          stores_.set(index, value);
          onChanged();
        } else {
          storesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder setStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          stores_.set(index, builderForValue.build());
          onChanged();
        } else {
          storesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
11055       /**
11056        * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
11057        */
11058       public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
11059         if (storesBuilder_ == null) {
11060           if (value == null) {
11061             throw new NullPointerException();
11062           }
11063           ensureStoresIsMutable();
11064           stores_.add(value);
11065           onChanged();
11066         } else {
11067           storesBuilder_.addMessage(value);
11068         }
11069         return this;
11070       }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder addStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) {
        if (storesBuilder_ == null) {
          // Reject null explicitly before touching the list.
          if (value == null) {
            throw new NullPointerException();
          }
          ensureStoresIsMutable();
          stores_.add(index, value);
          onChanged();
        } else {
          // Builder view is active: insert through it instead of the raw list.
          storesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder addStores(
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
        if (storesBuilder_ == null) {
          // Build the message and append to the locally held list.
          ensureStoresIsMutable();
          stores_.add(builderForValue.build());
          onChanged();
        } else {
          storesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder addStores(
          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) {
        if (storesBuilder_ == null) {
          // Build the message and insert it at the requested position.
          ensureStoresIsMutable();
          stores_.add(index, builderForValue.build());
          onChanged();
        } else {
          storesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder addAllStores(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> values) {
        if (storesBuilder_ == null) {
          ensureStoresIsMutable();
          // GeneratedMessage.Builder.addAll copies the iterable into stores_.
          super.addAll(values, stores_);
          onChanged();
        } else {
          storesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder clearStores() {
        if (storesBuilder_ == null) {
          // Drop the list and clear the 0x10 presence bit for this field.
          stores_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
          onChanged();
        } else {
          storesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public Builder removeStores(int index) {
        if (storesBuilder_ == null) {
          // Mutate the local list directly when no field builder exists.
          ensureStoresIsMutable();
          stores_.remove(index);
          onChanged();
        } else {
          storesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      // Forces creation of the RepeatedFieldBuilder and returns a mutable
      // builder view of the element at {@code index}.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder(
          int index) {
        return getStoresFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder(
          int index) {
        if (storesBuilder_ == null) {
          // Read from whichever representation currently owns the data.
          return stores_.get(index);  } else {
          return storesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
           getStoresOrBuilderList() {
        if (storesBuilder_ != null) {
          return storesBuilder_.getMessageOrBuilderList();
        } else {
          // Wrap the local list so callers cannot mutate builder state.
          return java.util.Collections.unmodifiableList(stores_);
        }
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      // Appends a new element (initialized to the default instance) and
      // returns its builder for in-place population.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() {
        return getStoresFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      // Inserts a new default-initialized element at {@code index} and
      // returns its builder for in-place population.
      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder(
          int index) {
        return getStoresFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code>
       */
      // Forces creation of the field builder; the returned list is a live
      // builder view over all elements.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder> 
           getStoresBuilderList() {
        return getStoresFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder for 'stores'. The current list
      // is handed to the builder (together with the 0x10 bit, which appears to
      // flag whether the list has been populated) and stores_ is nulled so all
      // subsequent access goes through storesBuilder_.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> 
          getStoresFieldBuilder() {
        if (storesBuilder_ == null) {
          storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>(
                  stores_,
                  ((bitField0_ & 0x00000010) == 0x00000010),
                  getParentForChildren(),
                  isClean());
          stores_ = null;
        }
        return storesBuilder_;
      }
11220 
      // optional .hbase.pb.ServerName server = 6;
      // Message value used while no SingleFieldBuilder exists for this field.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      // Lazily created by getServerFieldBuilder(); while null, server_ holds the value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public boolean hasServer() {
        // Presence of the optional field is tracked by bit 0x20 of bitField0_.
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
        if (serverBuilder_ == null) {
          // No field builder: the local message is authoritative.
          return server_;
        } else {
          return serverBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          server_ = value;
          onChanged();
        } else {
          serverBuilder_.setMessage(value);
        }
        // Mark the field present in either path.
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public Builder setServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverBuilder_ == null) {
          server_ = builderForValue.build();
          onChanged();
        } else {
          serverBuilder_.setMessage(builderForValue.build());
        }
        // Mark the field present in either path.
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          // Only merge field-by-field when a non-default value is already
          // present; otherwise adopt the incoming message wholesale.
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            server_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
          } else {
            server_ = value;
          }
          onChanged();
        } else {
          serverBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public Builder clearServer() {
        if (serverBuilder_ == null) {
          // Reset the local value to the type's default instance.
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          serverBuilder_.clear();
        }
        // Clear the presence bit regardless of which path was taken.
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
        // Handing out a mutable builder implies the field is (or will be) set.
        bitField0_ |= 0x00000020;
        onChanged();
        return getServerFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
        if (serverBuilder_ != null) {
          return serverBuilder_.getMessageOrBuilder();
        } else {
          // Fall back to the plain message when no builder exists.
          return server_;
        }
      }
      /**
       * <code>optional .hbase.pb.ServerName server = 6;</code>
       *
       * <pre>
       * Server who opened the region
       * </pre>
       */
      // Lazily creates the SingleFieldBuilder for 'server', transferring the
      // current value into it; server_ is nulled so subsequent access goes
      // through serverBuilder_.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getServerFieldBuilder() {
        if (serverBuilder_ == null) {
          serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  server_,
                  getParentForChildren(),
                  isClean());
          server_ = null;
        }
        return serverBuilder_;
      }
11373 
      // optional bytes region_name = 7;
      // Raw bytes of the full region name; empty until setRegionName is called.
      private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public boolean hasRegionName() {
        // Presence of the optional field is tracked by bit 0x40 of bitField0_.
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public com.google.protobuf.ByteString getRegionName() {
        return regionName_;
      }
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public Builder setRegionName(com.google.protobuf.ByteString value) {
        // NOTE: the irregular indentation below is protoc output, kept verbatim.
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
        regionName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes region_name = 7;</code>
       *
       * <pre>
       * full region name
       * </pre>
       */
      public Builder clearRegionName() {
        // Clear the presence bit and restore the field's default value.
        bitField0_ = (bitField0_ & ~0x00000040);
        regionName_ = getDefaultInstance().getRegionName();
        onChanged();
        return this;
      }
11425 
11426       // @@protoc_insertion_point(builder_scope:hbase.pb.RegionEventDescriptor)
11427     }
11428 
    // Eagerly create and initialize the shared default instance for
    // RegionEventDescriptor.
    static {
      defaultInstance = new RegionEventDescriptor(true);
      defaultInstance.initFields();
    }
11433 
11434     // @@protoc_insertion_point(class_scope:hbase.pb.RegionEventDescriptor)
11435   }
11436 
  /**
   * Accessor interface for {@code hbase.pb.WALTrailer}. The message declares
   * no fields, so no accessor methods are generated.
   */
  public interface WALTrailerOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hbase.pb.WALTrailer}
   *
   * <pre>
   **
   * A trailer that is appended to the end of a properly closed WAL file.
   * If missing, this is either a legacy or a corrupted WAL file.
   * N.B. This trailer currently doesn't contain any information and we
   * purposefully don't expose it in the WAL APIs. It's for future growth.
   * </pre>
   */
  // Field-less message: serialization and equality reduce to the unknown
  // field set only.
  public static final class WALTrailer extends
      com.google.protobuf.GeneratedMessage
      implements WALTrailerOrBuilder {
    // Use WALTrailer.newBuilder() to construct.
    private WALTrailer(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer to create defaultInstance.
    private WALTrailer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final WALTrailer defaultInstance;
    public static WALTrailer getDefaultInstance() {
      return defaultInstance;
    }

    public WALTrailer getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor. Since WALTrailer declares no fields, every tag is
    // either end-of-stream (0) or preserved in the unknown field set.
    private WALTrailer(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever was parsed so far, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.Builder.class);
    }

    // Shared parser instance; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<WALTrailer> PARSER =
        new com.google.protobuf.AbstractParser<WALTrailer>() {
      public WALTrailer parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WALTrailer(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WALTrailer> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. With no required
    // fields the message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      // Only unknown fields are written; there are no declared fields.
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) obj;

      // No declared fields: equality depends only on the unknown field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.WALTrailer}
     *
     * <pre>
     **
     * A trailer that is appended to the end of a properly closed WAL file.
     * If missing, this is either a legacy or a corrupted WAL file.
     * N.B. This trailer currently doesn't contain any information and we
     * purposefully don't expose it in the WAL APIs. It's for future growth.
     * </pre>
     */
    // Builder for the field-less WALTrailer; only unknown fields are merged.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailerOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message fields, so nothing to pre-initialize even when field
        // builders are forced.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer build() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge whatever was successfully parsed, even if parsing failed.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.WALTrailer)
    }

    static {
      defaultInstance = new WALTrailer(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.WALTrailer)
  }
11790 
  // Descriptor / field-accessor-table pairs for each message type declared in
  // WAL.proto. They are populated by the static initializer at the bottom of
  // this file once the serialized file descriptor has been parsed.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_WALHeader_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_WALHeader_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_WALKey_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_WALKey_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_FamilyScope_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_FamilyScope_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CompactionDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_FlushDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_StoreDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_BulkLoadDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionEventDescriptor_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_WALTrailer_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_WALTrailer_fieldAccessorTable;
11841 
11842   public static com.google.protobuf.Descriptors.FileDescriptor
11843       getDescriptor() {
11844     return descriptor;
11845   }
  // Holds the built FileDescriptor for WAL.proto; assigned inside the
  // assignDescriptors callback of the static block below, before any
  // message-level descriptor lookups run.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized (protoc-emitted, octal-escape-encoded) bytes of WAL.proto.
    // This is compiled descriptor data, not human-readable text: every byte
    // is significant and it must never be edited by hand — regenerate with
    // protoc instead.
    java.lang.String[] descriptorData = {
      "\n\tWAL.proto\022\010hbase.pb\032\013HBase.proto\032\014Clie" +
      "nt.proto\"\217\001\n\tWALHeader\022\027\n\017has_compressio" +
      "n\030\001 \001(\010\022\026\n\016encryption_key\030\002 \001(\014\022\033\n\023has_t" +
      "ag_compression\030\003 \001(\010\022\027\n\017writer_cls_name\030" +
      "\004 \001(\t\022\033\n\023cell_codec_cls_name\030\005 \001(\t\"\273\002\n\006W" +
      "ALKey\022\033\n\023encoded_region_name\030\001 \002(\014\022\022\n\nta" +
      "ble_name\030\002 \002(\014\022\033\n\023log_sequence_number\030\003 " +
      "\002(\004\022\022\n\nwrite_time\030\004 \002(\004\022&\n\ncluster_id\030\005 " +
      "\001(\0132\016.hbase.pb.UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132" +
      "\025.hbase.pb.FamilyScope\022\032\n\022following_kv_c",
      "ount\030\007 \001(\r\022#\n\013cluster_ids\030\010 \003(\0132\016.hbase." +
      "pb.UUID\022\022\n\nnonceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001" +
      "(\004\022\034\n\024orig_sequence_number\030\013 \001(\004\"F\n\013Fami" +
      "lyScope\022\016\n\006family\030\001 \002(\014\022\'\n\nscope_type\030\002 " +
      "\002(\0162\023.hbase.pb.ScopeType\"\276\001\n\024CompactionD" +
      "escriptor\022\022\n\ntable_name\030\001 \002(\014\022\033\n\023encoded" +
      "_region_name\030\002 \002(\014\022\023\n\013family_name\030\003 \002(\014\022" +
      "\030\n\020compaction_input\030\004 \003(\t\022\031\n\021compaction_" +
      "output\030\005 \003(\t\022\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013" +
      "region_name\030\007 \001(\014\"\244\003\n\017FlushDescriptor\0225\n",
      "\006action\030\001 \002(\0162%.hbase.pb.FlushDescriptor" +
      ".FlushAction\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023enco" +
      "ded_region_name\030\003 \002(\014\022\035\n\025flush_sequence_" +
      "number\030\004 \001(\004\022E\n\rstore_flushes\030\005 \003(\0132..hb" +
      "ase.pb.FlushDescriptor.StoreFlushDescrip" +
      "tor\022\023\n\013region_name\030\006 \001(\014\032Y\n\024StoreFlushDe" +
      "scriptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_h" +
      "ome_dir\030\002 \002(\t\022\024\n\014flush_output\030\003 \003(\t\"S\n\013F" +
      "lushAction\022\017\n\013START_FLUSH\020\000\022\020\n\014COMMIT_FL" +
      "USH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003",
      "\"R\n\017StoreDescriptor\022\023\n\013family_name\030\001 \002(\014" +
      "\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n\nstore_file\030\003" +
      " \003(\t\"\237\001\n\022BulkLoadDescriptor\022\'\n\ntable_nam" +
      "e\030\001 \002(\0132\023.hbase.pb.TableName\022\033\n\023encoded_" +
      "region_name\030\002 \002(\014\022)\n\006stores\030\003 \003(\0132\031.hbas" +
      "e.pb.StoreDescriptor\022\030\n\020bulkload_seq_num" +
      "\030\004 \002(\003\"\272\002\n\025RegionEventDescriptor\022=\n\neven" +
      "t_type\030\001 \002(\0162).hbase.pb.RegionEventDescr" +
      "iptor.EventType\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023e" +
      "ncoded_region_name\030\003 \002(\014\022\033\n\023log_sequence",
      "_number\030\004 \001(\004\022)\n\006stores\030\005 \003(\0132\031.hbase.pb" +
      ".StoreDescriptor\022$\n\006server\030\006 \001(\0132\024.hbase" +
      ".pb.ServerName\022\023\n\013region_name\030\007 \001(\014\".\n\tE" +
      "ventType\022\017\n\013REGION_OPEN\020\000\022\020\n\014REGION_CLOS" +
      "E\020\001\"\014\n\nWALTrailer*F\n\tScopeType\022\033\n\027REPLIC" +
      "ATION_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE" +
      "_GLOBAL\020\001B?\n*org.apache.hadoop.hbase.pro" +
      "tobuf.generatedB\tWALProtosH\001\210\001\000\240\001\001"
    };
    // Callback invoked by internalBuildGeneratedFileFrom once the descriptor
    // is parsed. It captures the root FileDescriptor and wires up each
    // message's Descriptor and reflection accessor table. The getMessageTypes()
    // indices (0..8) are positional and must match the declaration order of
    // messages in WAL.proto above.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hbase_pb_WALHeader_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hbase_pb_WALHeader_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_WALHeader_descriptor,
              new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", });
          internal_static_hbase_pb_WALKey_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hbase_pb_WALKey_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_WALKey_descriptor,
              new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", });
          internal_static_hbase_pb_FamilyScope_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hbase_pb_FamilyScope_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_FamilyScope_descriptor,
              new java.lang.String[] { "Family", "ScopeType", });
          internal_static_hbase_pb_CompactionDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_CompactionDescriptor_descriptor,
              new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", "RegionName", });
          internal_static_hbase_pb_FlushDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_FlushDescriptor_descriptor,
              new java.lang.String[] { "Action", "TableName", "EncodedRegionName", "FlushSequenceNumber", "StoreFlushes", "RegionName", });
          // StoreFlushDescriptor is nested inside FlushDescriptor, so it is
          // reached via getNestedTypes(), not the top-level message list.
          internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor =
            internal_static_hbase_pb_FlushDescriptor_descriptor.getNestedTypes().get(0);
          internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor,
              new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", });
          internal_static_hbase_pb_StoreDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_StoreDescriptor_descriptor,
              new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", });
          internal_static_hbase_pb_BulkLoadDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_BulkLoadDescriptor_descriptor,
              new java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", });
          internal_static_hbase_pb_RegionEventDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionEventDescriptor_descriptor,
              new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", "RegionName", });
          internal_static_hbase_pb_WALTrailer_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hbase_pb_WALTrailer_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_WALTrailer_descriptor,
              new java.lang.String[] { });
          // null: WAL.proto declares no extensions to register.
          return null;
        }
      };
    // Parse descriptorData and run the assigner. The dependency array must
    // list the descriptors of the files WAL.proto imports (HBase.proto,
    // Client.proto) in the same order as the imports in the .proto source.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
        }, assigner);
  }
11974 
11975   // @@protoc_insertion_point(outer_class_scope)
11976 }