diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 86fe515..23698c0 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -117,11 +117,13 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescripto
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.access.TablePermission;
 import org.apache.hadoop.hbase.security.access.UserPermission;
@@ -2542,8 +2544,7 @@ public final class ProtobufUtil {
         .setServer(toServerName(server));
     for (Map.Entry<byte[], List<Path>> entry : storeFiles.entrySet()) {
-      RegionEventDescriptor.StoreDescriptor.Builder builder
-        = RegionEventDescriptor.StoreDescriptor.newBuilder()
+      StoreDescriptor.Builder builder = StoreDescriptor.newBuilder()
           .setFamilyName(ByteStringer.wrap(entry.getKey()))
           .setStoreHomeDir(Bytes.toString(entry.getKey()));
       for (Path path : entry.getValue()) {
@@ -2760,4 +2761,34 @@ public final class ProtobufUtil {
     }
     return result;
   }
+
+  /**
+   * Generates a marker for the WAL so that we propagate the notion of a bulk region load
+   * throughout the WAL.
+   *
+   * @param tableName         The table into which the bulk load is being imported.
+   * @param encodedRegionName Encoded region name of the region being bulk loaded.
+   * @param storeFiles        The set of store files, keyed by column family, that are being
+   *                          bulk loaded.
+   * @param bulkloadSeqId     Sequence ID (obtained by a force flush) used to create the bulk
+   *                          load hfile name.
+   * @return The WAL log marker for bulk loads.
+   */
+  public static WALProtos.BulkLoadDescriptor toBulkLoadDescriptor(TableName tableName,
+      ByteString encodedRegionName, Map<byte[], List<Path>> storeFiles, long bulkloadSeqId) {
+    BulkLoadDescriptor.Builder desc = BulkLoadDescriptor.newBuilder()
+        .setTableName(ProtobufUtil.toProtoTableName(tableName))
+        .setEncodedRegionName(encodedRegionName).setBulkloadSeqNum(bulkloadSeqId);
+
+    for (Map.Entry<byte[], List<Path>> entry : storeFiles.entrySet()) {
+      WALProtos.StoreDescriptor.Builder builder = StoreDescriptor.newBuilder()
+          .setFamilyName(ByteStringer.wrap(entry.getKey()))
+          .setStoreHomeDir(Bytes.toString(entry.getKey())); // relative to region
+      for (Path path : entry.getValue()) {
+        builder.addStoreFile(path.getName());
+      }
+      desc.addStores(builder);
+    }
+
+    return desc.build();
+  }
 }
diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
index af61d47..b6e999e 100644
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
@@ -7600,124 +7600,101 @@ public final class WALProtos {
     // @@protoc_insertion_point(class_scope:FlushDescriptor)
   }
 
-  public interface RegionEventDescriptorOrBuilder
+  public interface StoreDescriptorOrBuilder
     extends com.google.protobuf.MessageOrBuilder {
 
-    // required .RegionEventDescriptor.EventType event_type = 1;
-    /**
-     * required .RegionEventDescriptor.EventType event_type = 1;
-     */
-    boolean hasEventType();
-    /**
-     * required .RegionEventDescriptor.EventType event_type = 1;
-     */
-    org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType();
-
-    // required bytes table_name = 2;
+    // required bytes family_name = 1;
     /**
-     * required bytes table_name = 2;
+     * required bytes family_name = 1;
      */
-    boolean hasTableName();
+    boolean hasFamilyName();
     /**
-     * required bytes table_name = 2;
+     * required bytes family_name = 1;
      */
-    com.google.protobuf.ByteString getTableName();
+    com.google.protobuf.ByteString getFamilyName();
 
-    // required bytes encoded_region_name = 3;
-    /**
-     * required bytes encoded_region_name = 3;
-     */
-    boolean hasEncodedRegionName();
+    // required string store_home_dir = 2;
     /**
-     * required bytes encoded_region_name = 3;
+     * required string store_home_dir = 2;
+     *
+     * <pre>
+     *relative to region dir
+     * </pre>
*/ - com.google.protobuf.ByteString getEncodedRegionName(); - - // optional uint64 log_sequence_number = 4; + boolean hasStoreHomeDir(); /** - * optional uint64 log_sequence_number = 4; + * required string store_home_dir = 2; + * + *
+     *relative to region dir
+     * </pre>
*/ - boolean hasLogSequenceNumber(); + java.lang.String getStoreHomeDir(); /** - * optional uint64 log_sequence_number = 4; + * required string store_home_dir = 2; + * + *
+     *relative to region dir
+     * </pre>
*/ - long getLogSequenceNumber(); + com.google.protobuf.ByteString + getStoreHomeDirBytes(); - // repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - */ - java.util.List - getStoresList(); - /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - */ - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getStores(int index); - /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - */ - int getStoresCount(); + // repeated string store_file = 3; /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - */ - java.util.List - getStoresOrBuilderList(); - /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated string store_file = 3; + * + *
+     * relative to store dir
+     * </pre>
*/ - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder getStoresOrBuilder( - int index); - - // optional .ServerName server = 6; + java.util.List + getStoreFileList(); /** - * optional .ServerName server = 6; + * repeated string store_file = 3; * *
-     * Server who opened the region
+     * relative to store dir
      * </pre>
*/ - boolean hasServer(); + int getStoreFileCount(); /** - * optional .ServerName server = 6; + * repeated string store_file = 3; * *
-     * Server who opened the region
+     * relative to store dir
      * </pre>
*/ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + java.lang.String getStoreFile(int index); /** - * optional .ServerName server = 6; + * repeated string store_file = 3; * *
-     * Server who opened the region
+     * relative to store dir
      * </pre>
*/ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); + com.google.protobuf.ByteString + getStoreFileBytes(int index); } /** - * Protobuf type {@code RegionEventDescriptor} - * - *
-   **
-   * Special WAL entry to hold all related to a region event (open/close).
-   * </pre>
+ * Protobuf type {@code StoreDescriptor} */ - public static final class RegionEventDescriptor extends + public static final class StoreDescriptor extends com.google.protobuf.GeneratedMessage - implements RegionEventDescriptorOrBuilder { - // Use RegionEventDescriptor.newBuilder() to construct. - private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + implements StoreDescriptorOrBuilder { + // Use StoreDescriptor.newBuilder() to construct. + private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final RegionEventDescriptor defaultInstance; - public static RegionEventDescriptor getDefaultInstance() { + private static final StoreDescriptor defaultInstance; + public static StoreDescriptor getDefaultInstance() { return defaultInstance; } - public RegionEventDescriptor getDefaultInstanceForType() { + public StoreDescriptor getDefaultInstanceForType() { return defaultInstance; } @@ -7727,7 +7704,7 @@ public final class WALProtos { getUnknownFields() { return this.unknownFields; } - private RegionEventDescriptor( + private StoreDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -7750,51 +7727,22 @@ public final class WALProtos { } break; } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - eventType_ = value; - } + case 10: { + bitField0_ |= 0x00000001; + familyName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); + storeHomeDir_ = input.readBytes(); break; } case 26: { - bitField0_ |= 0x00000004; - encodedRegionName_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - logSequenceNumber_ = input.readUInt64(); - break; - } - case 42: { - if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - stores_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000010; - } - stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.PARSER, extensionRegistry)); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; - if (((bitField0_ & 0x00000010) == 0x00000010)) { - subBuilder = server_.toBuilder(); - } - server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(server_); - server_ = subBuilder.buildPartial(); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + storeFile_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; } - bitField0_ |= 0x00000010; + storeFile_.add(input.readBytes()); break; } } @@ -7805,8 +7753,8 @@ public final class WALProtos { throw new com.google.protobuf.InvalidProtocolBufferException( 
e.getMessage()).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - stores_ = java.util.Collections.unmodifiableList(stores_); + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7814,1195 +7762,2563 @@ public final class WALProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionEventDescriptor parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public StoreDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionEventDescriptor(input, extensionRegistry); + return new StoreDescriptor(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } + private int bitField0_; + // required bytes family_name = 1; + public static final int FAMILY_NAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString familyName_; /** - * Protobuf enum {@code RegionEventDescriptor.EventType} + * required bytes family_name = 1; */ - public enum EventType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * REGION_OPEN = 0; - */ - REGION_OPEN(0, 0), - /** - * REGION_CLOSE = 1; - */ - REGION_CLOSE(1, 1), - ; - - /** - * REGION_OPEN = 0; - */ - public static final int REGION_OPEN_VALUE = 0; - /** - * REGION_CLOSE = 1; - */ - public static final int REGION_CLOSE_VALUE = 1; - - - public final int getNumber() { return value; } + public boolean hasFamilyName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family_name = 1; + */ + public com.google.protobuf.ByteString getFamilyName() { + return familyName_; + } - public static EventType valueOf(int value) { - switch (value) { - case 0: return REGION_OPEN; - case 1: return REGION_CLOSE; - default: return null; + // required string store_home_dir = 2; + public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; + private java.lang.Object storeHomeDir_; + /** + * required string store_home_dir = 2; + * + *
+     *relative to region dir
+     * </pre>
+ */ + public boolean hasStoreHomeDir() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string store_home_dir = 2; + * + *
+     *relative to region dir
+     * </pre>
+ */ + public java.lang.String getStoreHomeDir() { + java.lang.Object ref = storeHomeDir_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + storeHomeDir_ = s; } + return s; } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public EventType findValueByNumber(int number) { - return EventType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0); + } + /** + * required string store_home_dir = 2; + * + *
+     *relative to region dir
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getStoreHomeDirBytes() { + java.lang.Object ref = storeHomeDir_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + storeHomeDir_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } + } - private static final EventType[] VALUES = values(); - - public static EventType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private EventType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:RegionEventDescriptor.EventType) + // repeated string store_file = 3; + public static final int STORE_FILE_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList storeFile_; + /** + * repeated string store_file = 3; + * + *
+     * relative to store dir
+     * </pre>
+ */ + public java.util.List + getStoreFileList() { + return storeFile_; } - - public interface StoreDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family_name = 1; - /** - * required bytes family_name = 1; - */ - boolean hasFamilyName(); - /** - * required bytes family_name = 1; - */ - com.google.protobuf.ByteString getFamilyName(); - - // required string store_home_dir = 2; - /** - * required string store_home_dir = 2; - * - *
-       *relative to region dir
-       * </pre>
- */ - boolean hasStoreHomeDir(); - /** - * required string store_home_dir = 2; - * - *
-       *relative to region dir
-       * </pre>
- */ - java.lang.String getStoreHomeDir(); - /** - * required string store_home_dir = 2; - * - *
-       *relative to region dir
-       * </pre>
- */ - com.google.protobuf.ByteString - getStoreHomeDirBytes(); - - // repeated string store_file = 3; - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - java.util.List - getStoreFileList(); - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - int getStoreFileCount(); - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - java.lang.String getStoreFile(int index); - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - com.google.protobuf.ByteString - getStoreFileBytes(int index); + /** + * repeated string store_file = 3; + * + *
+     * relative to store dir
+     * </pre>
+ */ + public int getStoreFileCount() { + return storeFile_.size(); + } + /** + * repeated string store_file = 3; + * + *
+     * relative to store dir
+     * </pre>
+ */ + public java.lang.String getStoreFile(int index) { + return storeFile_.get(index); } /** - * Protobuf type {@code RegionEventDescriptor.StoreDescriptor} + * repeated string store_file = 3; + * + *
+     * relative to store dir
+     * </pre>
*/ - public static final class StoreDescriptor extends - com.google.protobuf.GeneratedMessage - implements StoreDescriptorOrBuilder { - // Use StoreDescriptor.newBuilder() to construct. - private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + public com.google.protobuf.ByteString + getStoreFileBytes(int index) { + return storeFile_.getByteString(index); + } - private static final StoreDescriptor defaultInstance; - public static StoreDescriptor getDefaultInstance() { - return defaultInstance; - } + private void initFields() { + familyName_ = com.google.protobuf.ByteString.EMPTY; + storeHomeDir_ = ""; + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; - public StoreDescriptor getDefaultInstanceForType() { - return defaultInstance; + if (!hasFamilyName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasStoreHomeDir()) { + memoizedIsInitialized = 0; + return false; } + memoizedIsInitialized = 1; + return true; + } - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, familyName_); } - private StoreDescriptor( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - familyName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - storeHomeDir_ = input.readBytes(); - break; - } - case 26: { - if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - storeFile_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000004; - } - storeFile_.add(input.readBytes()); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getStoreHomeDirBytes()); } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_descriptor; + for (int i = 0; i < storeFile_.size(); i++) { + output.writeBytes(3, storeFile_.getByteString(i)); } + getUnknownFields().writeTo(output); + } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder.class); - } + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StoreDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StoreDescriptor(input, extensionRegistry); + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, familyName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getStoreHomeDirBytes()); + } + { + int dataSize = 0; + for (int i = 0; i < storeFile_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(storeFile_.getByteString(i)); } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + size += dataSize; + size += 1 * getStoreFileList().size(); } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } - private int bitField0_; - // required bytes family_name = 1; - public static final int FAMILY_NAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString familyName_; - /** - * required bytes family_name = 1; - */ - public boolean hasFamilyName() { - return ((bitField0_ & 0x00000001) == 0x00000001); + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; } - /** - * required bytes family_name = 1; - */ - public com.google.protobuf.ByteString getFamilyName() { - return familyName_; + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)) { + return super.equals(obj); } + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) obj; - // required string store_home_dir = 2; - public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; - private java.lang.Object storeHomeDir_; - /** - * required string store_home_dir = 2; - * - *
-       *relative to region dir
-       * </pre>
- */ - public boolean hasStoreHomeDir() { - return ((bitField0_ & 0x00000002) == 0x00000002); + boolean result = true; + result = result && (hasFamilyName() == other.hasFamilyName()); + if (hasFamilyName()) { + result = result && getFamilyName() + .equals(other.getFamilyName()); } - /** - * required string store_home_dir = 2; - * - *
-       *relative to region dir
-       * </pre>
- */ - public java.lang.String getStoreHomeDir() { - java.lang.Object ref = storeHomeDir_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - storeHomeDir_ = s; - } - return s; - } + result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); + if (hasStoreHomeDir()) { + result = result && getStoreHomeDir() + .equals(other.getStoreHomeDir()); } - /** - * required string store_home_dir = 2; - * - *
-       *relative to region dir
-       * </pre>
- */ - public com.google.protobuf.ByteString - getStoreHomeDirBytes() { - java.lang.Object ref = storeHomeDir_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - storeHomeDir_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + result = result && getStoreFileList() + .equals(other.getStoreFileList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamilyName()) { + hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; + hash = (53 * hash) + getFamilyName().hashCode(); + } + if (hasStoreHomeDir()) { + hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; + hash = (53 * hash) + getStoreHomeDir().hashCode(); } + if (getStoreFileCount() > 0) { + hash = (37 * hash) + STORE_FILE_FIELD_NUMBER; + hash = (53 * hash) + getStoreFileList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } - // repeated string store_file = 3; - public static final int STORE_FILE_FIELD_NUMBER = 3; - private com.google.protobuf.LazyStringList storeFile_; - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - public java.util.List - getStoreFileList() { - return storeFile_; + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code StoreDescriptor} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_descriptor; } - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - public int getStoreFileCount() { - return storeFile_.size(); + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - public java.lang.String getStoreFile(int index) { - return storeFile_.get(index); + + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); } - /** - * repeated string store_file = 3; - * - *
-       * relative to store dir
-       * </pre>
- */ - public com.google.protobuf.ByteString - getStoreFileBytes(int index) { - return storeFile_.getByteString(index); + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); } - private void initFields() { + public Builder clear() { + super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); storeHomeDir_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - if (!hasFamilyName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasStoreHomeDir()) { - memoizedIsInitialized = 0; - return false; + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_StoreDescriptor_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); } - memoizedIsInitialized = 1; - return true; + return result; } - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, familyName_); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStoreHomeDirBytes()); + result.familyName_ = familyName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; } - for (int i = 0; i < storeFile_.size(); i++) { - output.writeBytes(3, storeFile_.getByteString(i)); + result.storeHomeDir_ = storeHomeDir_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( + storeFile_); + bitField0_ = (bitField0_ & ~0x00000004); } - getUnknownFields().writeTo(output); + result.storeFile_ = storeFile_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; } - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, familyName_); - } - if 
(((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStoreHomeDirBytes()); - } - { - int dataSize = 0; - for (int i = 0; i < storeFile_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(storeFile_.getByteString(i)); - } - size += dataSize; - size += 1 * getStoreFileList().size(); + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)other); + } else { + super.mergeFrom(other); + return this; } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); } - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor)) { - return super.equals(obj); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()) return this; + if (other.hasFamilyName()) { + setFamilyName(other.getFamilyName()); } - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor) obj; - - boolean result = true; - result = result && (hasFamilyName() == other.hasFamilyName()); - if (hasFamilyName()) { - result = result && getFamilyName() - .equals(other.getFamilyName()); + if (other.hasStoreHomeDir()) { + bitField0_ |= 0x00000002; + storeHomeDir_ = other.storeHomeDir_; + onChanged(); } - result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); - if (hasStoreHomeDir()) { - result = result && getStoreHomeDir() - .equals(other.getStoreHomeDir()); + if (!other.storeFile_.isEmpty()) { + if (storeFile_.isEmpty()) { + storeFile_ = other.storeFile_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureStoreFileIsMutable(); + storeFile_.addAll(other.storeFile_); + } + onChanged(); } - result = result && getStoreFileList() - .equals(other.getStoreFileList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; + this.mergeUnknownFields(other.getUnknownFields()); + return this; } - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamilyName()) { - hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; - hash = (53 * hash) + getFamilyName().hashCode(); - } - if (hasStoreHomeDir()) { - hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; - hash = (53 * hash) + getStoreHomeDir().hashCode(); + public final boolean isInitialized() { + if (!hasFamilyName()) { + + return false; } - if (getStoreFileCount() > 0) { - hash = (37 * hash) + STORE_FILE_FIELD_NUMBER; - hash = (53 * hash) + getStoreFileList().hashCode(); + if (!hasStoreHomeDir()) { + + return false; } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return 
hash; + return true; } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom( - java.io.InputStream input, + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + private int bitField0_; + + // required bytes family_name = 1; + private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family_name = 1; + */ + public boolean hasFamilyName() { + return ((bitField0_ & 0x00000001) == 0x00000001); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); + /** + * required bytes family_name = 1; + */ + public com.google.protobuf.ByteString getFamilyName() { + return familyName_; } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom( - com.google.protobuf.CodedInputStream 
input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + /** + * required bytes family_name = 1; + */ + public Builder setFamilyName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + familyName_ = value; + onChanged(); + return this; } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + /** + * required bytes family_name = 1; + */ + public Builder clearFamilyName() { + bitField0_ = (bitField0_ & ~0x00000001); + familyName_ = getDefaultInstance().getFamilyName(); + onChanged(); + return this; } - public Builder toBuilder() { return newBuilder(this); } - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; + // required string store_home_dir = 2; + private java.lang.Object storeHomeDir_ = ""; + /** + * required string store_home_dir = 2; + * + *
+       *relative to region dir
+       * </pre>
+ */ + public boolean hasStoreHomeDir() { + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * Protobuf type {@code RegionEventDescriptor.StoreDescriptor} + * required string store_home_dir = 2; + * + *
+       *relative to region dir
+       * </pre>
*/ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } + public java.lang.String getStoreHomeDir() { + java.lang.Object ref = storeHomeDir_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + storeHomeDir_ = s; + return s; + } else { + return (java.lang.String) ref; } - private static Builder create() { - return new Builder(); + } + /** + * required string store_home_dir = 2; + * + *
+       *relative to region dir
+       * </pre>
+ */ + public com.google.protobuf.ByteString + getStoreHomeDirBytes() { + java.lang.Object ref = storeHomeDir_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + storeHomeDir_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } - - public Builder clear() { - super.clear(); - familyName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - storeHomeDir_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); + } + /** + * required string store_home_dir = 2; + * + *
+       *relative to region dir
+       * </pre>
+ */ + public Builder setStoreHomeDir( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + storeHomeDir_ = value; + onChanged(); + return this; + } + /** + * required string store_home_dir = 2; + * + *
+       *relative to region dir
+       * </pre>
+ */ + public Builder clearStoreHomeDir() { + bitField0_ = (bitField0_ & ~0x00000002); + storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); + onChanged(); + return this; + } + /** + * required string store_home_dir = 2; + * + *
+       *relative to region dir
+       * </pre>
+ */ + public Builder setStoreHomeDirBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + storeHomeDir_ = value; + onChanged(); + return this; + } + + // repeated string store_file = 3; + private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureStoreFileIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); + bitField0_ |= 0x00000004; + } + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public java.util.List + getStoreFileList() { + return java.util.Collections.unmodifiableList(storeFile_); + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public int getStoreFileCount() { + return storeFile_.size(); + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public java.lang.String getStoreFile(int index) { + return storeFile_.get(index); + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public com.google.protobuf.ByteString + getStoreFileBytes(int index) { + return storeFile_.getByteString(index); + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public Builder setStoreFile( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public Builder addStoreFile( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.add(value); + onChanged(); + return this; + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public Builder addAllStoreFile( + java.lang.Iterable values) { + ensureStoreFileIsMutable(); + super.addAll(values, storeFile_); + onChanged(); + return this; + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public Builder clearStoreFile() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * repeated string store_file = 3; + * + *
+       * relative to store dir
+       * </pre>
+ */ + public Builder addStoreFileBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.add(value); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:StoreDescriptor) + } + + static { + defaultInstance = new StoreDescriptor(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StoreDescriptor) + } + + public interface RegionEventDescriptorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionEventDescriptor.EventType event_type = 1; + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + boolean hasEventType(); + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType(); + + // required bytes table_name = 2; + /** + * required bytes table_name = 2; + */ + boolean hasTableName(); + /** + * required bytes table_name = 2; + */ + com.google.protobuf.ByteString getTableName(); + + // required bytes encoded_region_name = 3; + /** + * required bytes encoded_region_name = 3; + */ + boolean hasEncodedRegionName(); + /** + * required bytes encoded_region_name = 3; + */ + com.google.protobuf.ByteString getEncodedRegionName(); + + // optional uint64 log_sequence_number = 4; + /** + * optional uint64 log_sequence_number = 4; + */ + boolean hasLogSequenceNumber(); + /** + * optional uint64 log_sequence_number = 4; + */ + long getLogSequenceNumber(); + + // repeated .StoreDescriptor stores = 5; + /** + * repeated .StoreDescriptor stores = 5; + */ + java.util.List + getStoresList(); + /** + * repeated .StoreDescriptor stores = 5; + */ + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index); + /** + * repeated .StoreDescriptor stores = 5; + */ + int getStoresCount(); + /** + * repeated .StoreDescriptor stores = 5; + */ + java.util.List + getStoresOrBuilderList(); + /** + * repeated .StoreDescriptor stores = 5; + */ + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( + int index); + + // optional .ServerName server = 6; + /** + * optional .ServerName server = 6; + * + *
+     * Server who opened the region
+     * </pre>
+ */ + boolean hasServer(); + /** + * optional .ServerName server = 6; + * + *
+     * Server who opened the region
+     * </pre>
+ */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + /** + * optional .ServerName server = 6; + * + *
+     * Server who opened the region
+     * </pre>
+ */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); + } + /** + * Protobuf type {@code RegionEventDescriptor} + * + *
+   **
+   * Special WAL entry to hold all related to a region event (open/close).
+   * </pre>
+ */ + public static final class RegionEventDescriptor extends + com.google.protobuf.GeneratedMessage + implements RegionEventDescriptorOrBuilder { + // Use RegionEventDescriptor.newBuilder() to construct. + private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RegionEventDescriptor defaultInstance; + public static RegionEventDescriptor getDefaultInstance() { + return defaultInstance; + } + + public RegionEventDescriptor getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionEventDescriptor( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + eventType_ = value; + } + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + encodedRegionName_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + logSequenceNumber_ = input.readUInt64(); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + stores_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + subBuilder = server_.toBuilder(); + } + server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(server_); + server_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000010; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + stores_ = java.util.Collections.unmodifiableList(stores_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionEventDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionEventDescriptor(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code RegionEventDescriptor.EventType} + */ + public enum EventType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * REGION_OPEN = 0; + */ + REGION_OPEN(0, 0), + /** + * REGION_CLOSE = 1; + */ + REGION_CLOSE(1, 1), + ; + + /** + * REGION_OPEN = 0; + */ + public static final int REGION_OPEN_VALUE = 0; + /** + * REGION_CLOSE = 1; + */ + public static final int REGION_CLOSE_VALUE = 1; + + + public final int getNumber() { return value; } + + public static EventType valueOf(int value) { + switch (value) { + case 0: return REGION_OPEN; + case 1: return REGION_CLOSE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public EventType findValueByNumber(int number) { + return EventType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0); + } + + private static final EventType[] VALUES = values(); + + public static EventType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private EventType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:RegionEventDescriptor.EventType) + } + + private int bitField0_; + // required .RegionEventDescriptor.EventType event_type = 1; + public static final int EVENT_TYPE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_; + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + public boolean hasEventType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required 
.RegionEventDescriptor.EventType event_type = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { + return eventType_; + } + + // required bytes table_name = 2; + public static final int TABLE_NAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + /** + * required bytes table_name = 2; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes table_name = 2; + */ + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required bytes encoded_region_name = 3; + public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString encodedRegionName_; + /** + * required bytes encoded_region_name = 3; + */ + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required bytes encoded_region_name = 3; + */ + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + + // optional uint64 log_sequence_number = 4; + public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4; + private long logSequenceNumber_; + /** + * optional uint64 log_sequence_number = 4; + */ + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional uint64 log_sequence_number = 4; + */ + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + + // repeated .StoreDescriptor stores = 5; + public static final int STORES_FIELD_NUMBER = 5; + private java.util.List stores_; + /** + * repeated .StoreDescriptor stores = 5; + */ + public java.util.List getStoresList() { + return stores_; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public java.util.List + getStoresOrBuilderList() { + return stores_; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public int getStoresCount() { + return stores_.size(); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { + return stores_.get(index); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( + int index) { + return stores_.get(index); + } + + // optional .ServerName server = 6; + public static final int SERVER_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + /** + * optional .ServerName server = 6; + * + *
+     * Server that opened the region
+     * 
+ */ + public boolean hasServer() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional .ServerName server = 6; + * + *
+     * Server that opened the region
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + return server_; + } + /** + * optional .ServerName server = 6; + * + *
+     * Server that opened the region
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + return server_; + } + + private void initFields() { + eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + tableName_ = com.google.protobuf.ByteString.EMPTY; + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + logSequenceNumber_ = 0L; + stores_ = java.util.Collections.emptyList(); + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEventType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasEncodedRegionName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getStoresCount(); i++) { + if (!getStores(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasServer()) { + if (!getServer().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, eventType_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, encodedRegionName_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, logSequenceNumber_); + } + for (int i = 0; i < stores_.size(); i++) { + output.writeMessage(5, stores_.get(i)); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(6, server_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, eventType_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, encodedRegionName_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, logSequenceNumber_); + } + for (int i = 0; i < stores_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, stores_.get(i)); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, server_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) obj; + + boolean result = true; + result = result && (hasEventType() == other.hasEventType()); + if (hasEventType()) { + result = result && + (getEventType() == other.getEventType()); + } + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); + if (hasEncodedRegionName()) { + result = result && getEncodedRegionName() + .equals(other.getEncodedRegionName()); + } + result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); + if (hasLogSequenceNumber()) { + result = result && (getLogSequenceNumber() + == other.getLogSequenceNumber()); + } + result = result && getStoresList() + .equals(other.getStoresList()); + result = result && (hasServer() == other.hasServer()); + if (hasServer()) { + result = result && getServer() + .equals(other.getServer()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEventType()) { + hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getEventType()); + } + if (hasTableName()) { + hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasEncodedRegionName()) { + hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; + hash = (53 * hash) + getEncodedRegionName().hashCode(); + } + if (hasLogSequenceNumber()) { + hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLogSequenceNumber()); + } + if (getStoresCount() > 0) { + hash = (37 * hash) + STORES_FIELD_NUMBER; + hash = (53 * hash) + getStoresList().hashCode(); + } + if (hasServer()) { + hash = (37 * hash) + SERVER_FIELD_NUMBER; + hash = (53 * hash) + getServer().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code RegionEventDescriptor} + * + *
+     **
+     * Special WAL entry to hold everything related to a region event (open/close).
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getStoresFieldBuilder(); + getServerFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + logSequenceNumber_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (storesBuilder_ == null) { + stores_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + storesBuilder_.clear(); + } + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.eventType_ = eventType_; + if 
(((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.encodedRegionName_ = encodedRegionName_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.logSequenceNumber_ = logSequenceNumber_; + if (storesBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + stores_ = java.util.Collections.unmodifiableList(stores_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.stores_ = stores_; + } else { + result.stores_ = storesBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000010; + } + if (serverBuilder_ == null) { + result.server_ = server_; + } else { + result.server_ = serverBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)other); + } else { + super.mergeFrom(other); return this; } + } - public Builder clone() { - return create().mergeFrom(buildPartial()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this; + if (other.hasEventType()) { + setEventType(other.getEventType()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasEncodedRegionName()) { + setEncodedRegionName(other.getEncodedRegionName()); + } + if (other.hasLogSequenceNumber()) { + setLogSequenceNumber(other.getLogSequenceNumber()); + } + if (storesBuilder_ == null) { + if (!other.stores_.isEmpty()) { + if (stores_.isEmpty()) { + stores_ = other.stores_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureStoresIsMutable(); + stores_.addAll(other.stores_); + } + onChanged(); + } + } else { + if (!other.stores_.isEmpty()) { + if (storesBuilder_.isEmpty()) { + storesBuilder_.dispose(); + storesBuilder_ = null; + stores_ = other.stores_; + bitField0_ = (bitField0_ & ~0x00000010); + storesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getStoresFieldBuilder() : null; + } else { + storesBuilder_.addAllMessages(other.stores_); + } + } + } + if (other.hasServer()) { + mergeServer(other.getServer()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEventType()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + if (!hasEncodedRegionName()) { + + return false; + } + for (int i = 0; i < getStoresCount(); i++) { + if (!getStores(i).isInitialized()) { + + return false; + } + } + if (hasServer()) { + if (!getServer().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .RegionEventDescriptor.EventType event_type = 1; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + public boolean hasEventType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { + return eventType_; + } + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + public Builder setEventType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) { + if (value == null) { + throw new NullPointerException(); } + bitField0_ |= 0x00000001; + eventType_ = value; + onChanged(); + return this; + } + /** + * required .RegionEventDescriptor.EventType event_type = 1; + */ + public Builder clearEventType() { + bitField0_ = (bitField0_ & ~0x00000001); + eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + onChanged(); + return this; + } + + // required bytes table_name = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes table_name = 2; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes table_name = 2; + */ + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + /** + * required bytes table_name = 2; + */ + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + /** + * required bytes table_name = 2; + */ + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required bytes encoded_region_name = 3; + private 
com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes encoded_region_name = 3; + */ + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required bytes encoded_region_name = 3; + */ + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + /** + * required bytes encoded_region_name = 3; + */ + public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + encodedRegionName_ = value; + onChanged(); + return this; + } + /** + * required bytes encoded_region_name = 3; + */ + public Builder clearEncodedRegionName() { + bitField0_ = (bitField0_ & ~0x00000004); + encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); + onChanged(); + return this; + } + + // optional uint64 log_sequence_number = 4; + private long logSequenceNumber_ ; + /** + * optional uint64 log_sequence_number = 4; + */ + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional uint64 log_sequence_number = 4; + */ + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + /** + * optional uint64 log_sequence_number = 4; + */ + public Builder setLogSequenceNumber(long value) { + bitField0_ |= 0x00000008; + logSequenceNumber_ = value; + onChanged(); + return this; + } + /** + * optional uint64 log_sequence_number = 4; + */ + public Builder clearLogSequenceNumber() { + bitField0_ = (bitField0_ & ~0x00000008); + logSequenceNumber_ = 0L; + onChanged(); + return this; + } - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_descriptor; - } + // repeated .StoreDescriptor stores = 5; + private java.util.List stores_ = + java.util.Collections.emptyList(); + private void ensureStoresIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + stores_ = new java.util.ArrayList(stores_); + bitField0_ |= 0x00000010; + } + } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance(); - } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor build() { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; + /** + * repeated .StoreDescriptor stores = 5; + */ + public java.util.List getStoresList() { + if (storesBuilder_ == null) { + return java.util.Collections.unmodifiableList(stores_); + } else { + return storesBuilder_.getMessageList(); } - - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor result = new 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.familyName_ = familyName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.storeHomeDir_ = storeHomeDir_; - if (((bitField0_ & 0x00000004) == 0x00000004)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( - storeFile_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.storeFile_ = storeFile_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public int getStoresCount() { + if (storesBuilder_ == null) { + return stores_.size(); + } else { + return storesBuilder_.getCount(); } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor)other); - } else { - super.mergeFrom(other); - return this; - } + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { + if (storesBuilder_ == null) { + return stores_.get(index); + } else { + return storesBuilder_.getMessage(index); } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance()) return this; - if (other.hasFamilyName()) { - setFamilyName(other.getFamilyName()); - } - if (other.hasStoreHomeDir()) { - bitField0_ |= 0x00000002; - storeHomeDir_ = other.storeHomeDir_; - onChanged(); - } - if (!other.storeFile_.isEmpty()) { - if (storeFile_.isEmpty()) { - storeFile_ = other.storeFile_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureStoreFileIsMutable(); - storeFile_.addAll(other.storeFile_); - } - onChanged(); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder setStores( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { + if (storesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); } - this.mergeUnknownFields(other.getUnknownFields()); - return this; + ensureStoresIsMutable(); + stores_.set(index, value); + onChanged(); + } else { + storesBuilder_.setMessage(index, value); } - - public final boolean isInitialized() { - if (!hasFamilyName()) { - - return false; - } - if (!hasStoreHomeDir()) { - - return false; - } - return true; + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder setStores( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { + if (storesBuilder_ == null) { + ensureStoresIsMutable(); + stores_.set(index, builderForValue.build()); + onChanged(); + } else { + storesBuilder_.setMessage(index, builderForValue.build()); } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parsedMessage = null; - try { - 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { + if (storesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); } - return this; - } - private int bitField0_; - - // required bytes family_name = 1; - private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes family_name = 1; - */ - public boolean hasFamilyName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family_name = 1; - */ - public com.google.protobuf.ByteString getFamilyName() { - return familyName_; + ensureStoresIsMutable(); + stores_.add(value); + onChanged(); + } else { + storesBuilder_.addMessage(value); } - /** - * required bytes family_name = 1; - */ - public Builder setFamilyName(com.google.protobuf.ByteString value) { + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder addStores( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { + if (storesBuilder_ == null) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - familyName_ = value; + throw new NullPointerException(); + } + ensureStoresIsMutable(); + stores_.add(index, value); onChanged(); - return this; + } else { + storesBuilder_.addMessage(index, value); } - /** - * required bytes family_name = 1; - */ - public Builder clearFamilyName() { - bitField0_ = (bitField0_ & ~0x00000001); - familyName_ = getDefaultInstance().getFamilyName(); + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder addStores( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { + if (storesBuilder_ == null) { + ensureStoresIsMutable(); + stores_.add(builderForValue.build()); onChanged(); - return this; - } - - // required string store_home_dir = 2; - private java.lang.Object storeHomeDir_ = ""; - /** - * required string store_home_dir = 2; - * - *
-         *relative to region dir
-         * 
- */ - public boolean hasStoreHomeDir() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required string store_home_dir = 2; - * - *
-         *relative to region dir
-         * 
- */ - public java.lang.String getStoreHomeDir() { - java.lang.Object ref = storeHomeDir_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - storeHomeDir_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string store_home_dir = 2; - * - *
-         *relative to region dir
-         * 
- */ - public com.google.protobuf.ByteString - getStoreHomeDirBytes() { - java.lang.Object ref = storeHomeDir_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - storeHomeDir_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + } else { + storesBuilder_.addMessage(builderForValue.build()); } - /** - * required string store_home_dir = 2; - * - *
-         *relative to region dir
-         * 
- */ - public Builder setStoreHomeDir( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - storeHomeDir_ = value; + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder addStores( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { + if (storesBuilder_ == null) { + ensureStoresIsMutable(); + stores_.add(index, builderForValue.build()); onChanged(); - return this; + } else { + storesBuilder_.addMessage(index, builderForValue.build()); } - /** - * required string store_home_dir = 2; - * - *
-         *relative to region dir
-         * 
- */ - public Builder clearStoreHomeDir() { - bitField0_ = (bitField0_ & ~0x00000002); - storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder addAllStores( + java.lang.Iterable values) { + if (storesBuilder_ == null) { + ensureStoresIsMutable(); + super.addAll(values, stores_); onChanged(); - return this; + } else { + storesBuilder_.addAllMessages(values); } - /** - * required string store_home_dir = 2; - * - *
-         *relative to region dir
-         * 
- */ - public Builder setStoreHomeDirBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - storeHomeDir_ = value; + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder clearStores() { + if (storesBuilder_ == null) { + stores_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); onChanged(); - return this; + } else { + storesBuilder_.clear(); } - - // repeated string store_file = 3; - private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureStoreFileIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); - bitField0_ |= 0x00000004; - } + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public Builder removeStores(int index) { + if (storesBuilder_ == null) { + ensureStoresIsMutable(); + stores_.remove(index); + onChanged(); + } else { + storesBuilder_.remove(index); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public java.util.List - getStoreFileList() { - return java.util.Collections.unmodifiableList(storeFile_); + return this; + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder( + int index) { + return getStoresFieldBuilder().getBuilder(index); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( + int index) { + if (storesBuilder_ == null) { + return stores_.get(index); } else { + return storesBuilder_.getMessageOrBuilder(index); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public int getStoreFileCount() { - return storeFile_.size(); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public java.util.List + getStoresOrBuilderList() { + if (storesBuilder_ != null) { + return storesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(stores_); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public java.lang.String getStoreFile(int index) { - return storeFile_.get(index); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() { + return getStoresFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder( + int index) { + return getStoresFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); + } + /** + * repeated .StoreDescriptor stores = 5; + */ + public java.util.List + getStoresBuilderList() { + return getStoresFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> + getStoresFieldBuilder() { + if (storesBuilder_ == null) { + storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( + stores_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + stores_ = null; } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public com.google.protobuf.ByteString - getStoreFileBytes(int index) { - return storeFile_.getByteString(index); + return storesBuilder_; + } + + // optional .ServerName server = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public boolean hasServer() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + if (serverBuilder_ == null) { + return server_; + } else { + return serverBuilder_.getMessage(); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public Builder setStoreFile( - int index, java.lang.String value) { + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.set(index, value); + throw new NullPointerException(); + } + server_ = value; onChanged(); - return this; + } else { + serverBuilder_.setMessage(value); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public Builder addStoreFile( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.add(value); + bitField0_ |= 0x00000020; + return this; + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public Builder setServer( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverBuilder_ == null) { + server_ = builderForValue.build(); onChanged(); - return this; + } else { + serverBuilder_.setMessage(builderForValue.build()); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public Builder addAllStoreFile( - java.lang.Iterable values) { - ensureStoreFileIsMutable(); - super.addAll(values, storeFile_); + bitField0_ |= 0x00000020; + return this; + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + server_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); + } else { + server_ = value; + } + onChanged(); + } else { + serverBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public Builder clearServer() { + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); onChanged(); - return this; + } else { + serverBuilder_.clear(); } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public Builder clearStoreFile() { - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - return this; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getServerFieldBuilder().getBuilder(); + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + if (serverBuilder_ != null) { + return serverBuilder_.getMessageOrBuilder(); + } else { + return server_; } - /** - * repeated string store_file = 3; - * - *
-         * relative to store dir
-         * 
- */ - public Builder addStoreFileBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.add(value); - onChanged(); - return this; + } + /** + * optional .ServerName server = 6; + * + *
+       * Server that opened the region
+       * 
+ */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerFieldBuilder() { + if (serverBuilder_ == null) { + serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + server_, + getParentForChildren(), + isClean()); + server_ = null; } - - // @@protoc_insertion_point(builder_scope:RegionEventDescriptor.StoreDescriptor) + return serverBuilder_; } - static { - defaultInstance = new StoreDescriptor(true); - defaultInstance.initFields(); - } + // @@protoc_insertion_point(builder_scope:RegionEventDescriptor) + } - // @@protoc_insertion_point(class_scope:RegionEventDescriptor.StoreDescriptor) + static { + defaultInstance = new RegionEventDescriptor(true); + defaultInstance.initFields(); } - private int bitField0_; - // required .RegionEventDescriptor.EventType event_type = 1; - public static final int EVENT_TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_; + // @@protoc_insertion_point(class_scope:RegionEventDescriptor) + } + + public interface BulkLoadDescriptorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .TableName table_name = 1; /** - * required .RegionEventDescriptor.EventType event_type = 1; + * required .TableName table_name = 1; */ - public boolean hasEventType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } + boolean hasTableName(); /** - * required .RegionEventDescriptor.EventType event_type = 1; + * required .TableName table_name = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { - return eventType_; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); + /** + * required .TableName table_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required bytes table_name = 2; - public static final int TABLE_NAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString tableName_; + // required bytes encoded_region_name = 2; /** - * required bytes table_name = 2; + * required bytes encoded_region_name = 2; */ - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } + boolean hasEncodedRegionName(); /** - * required bytes table_name = 2; + * required bytes encoded_region_name = 2; */ - public com.google.protobuf.ByteString getTableName() { - return tableName_; + com.google.protobuf.ByteString getEncodedRegionName(); + + // repeated .StoreDescriptor stores = 3; + /** + * repeated .StoreDescriptor stores = 3; + */ + java.util.List + getStoresList(); + /** + * repeated .StoreDescriptor stores = 3; + */ + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index); + /** + * repeated .StoreDescriptor stores = 3; + */ + int getStoresCount(); + /** + * repeated .StoreDescriptor stores = 3; + */ + java.util.List + getStoresOrBuilderList(); + /** + * repeated .StoreDescriptor stores = 3; + */ + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( + int index); 
+ + // required int64 bulkload_seq_num = 4; + /** + * required int64 bulkload_seq_num = 4; + */ + boolean hasBulkloadSeqNum(); + /** + * required int64 bulkload_seq_num = 4; + */ + long getBulkloadSeqNum(); + } + /** + * Protobuf type {@code BulkLoadDescriptor} + * + *
+   **
+   * Special WAL entry used for HBASE-11567 (Write bulk load events to WAL)
+   * 
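+   *
+   * Illustrative sketch only, assuming the standard generated-builder setters;
+   * the variable names are placeholders, not part of this patch.
+   *
+   *   BulkLoadDescriptor desc = BulkLoadDescriptor.newBuilder()
+   *       .setTableName(tableName)               // required .TableName message
+   *       .setEncodedRegionName(regionNameBytes) // required bytes
+   *       .addStores(storeDescriptor)            // repeated .StoreDescriptor
+   *       .setBulkloadSeqNum(bulkloadSeqId)      // required int64
+   *       .build();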
+ */ + public static final class BulkLoadDescriptor extends + com.google.protobuf.GeneratedMessage + implements BulkLoadDescriptorOrBuilder { + // Use BulkLoadDescriptor.newBuilder() to construct. + private BulkLoadDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); } + private BulkLoadDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - // required bytes encoded_region_name = 3; - public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString encodedRegionName_; + private static final BulkLoadDescriptor defaultInstance; + public static BulkLoadDescriptor getDefaultInstance() { + return defaultInstance; + } + + public BulkLoadDescriptor getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BulkLoadDescriptor( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = tableName_.toBuilder(); + } + tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tableName_); + tableName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + encodedRegionName_ = input.readBytes(); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + stores_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); + break; + } + case 32: { + bitField0_ |= 0x00000004; + bulkloadSeqNum_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + stores_ = java.util.Collections.unmodifiableList(stores_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BulkLoadDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkLoadDescriptor(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .TableName table_name = 1; + public static final int TABLE_NAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** - * required bytes encoded_region_name = 3; + * required .TableName table_name = 1; */ - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000004) == 0x00000004); + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bytes encoded_region_name = 3; + * required .TableName table_name = 1; */ - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { + return tableName_; } - - // optional uint64 log_sequence_number = 4; - public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4; - private long logSequenceNumber_; /** - * optional uint64 log_sequence_number = 4; + * required .TableName table_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { + return tableName_; + } + + // required bytes encoded_region_name = 2; + public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString encodedRegionName_; + /** + * required bytes encoded_region_name = 2; */ - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000008) == 0x00000008); + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 log_sequence_number = 4; + * required bytes encoded_region_name = 2; */ - public long getLogSequenceNumber() { - return logSequenceNumber_; + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; } - // repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - public static final int STORES_FIELD_NUMBER = 5; - private java.util.List stores_; + // repeated .StoreDescriptor stores = 3; + public static final int STORES_FIELD_NUMBER = 3; + private java.util.List stores_; /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public java.util.List getStoresList() { + public java.util.List getStoresList() { return stores_; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public java.util.List + public java.util.List getStoresOrBuilderList() { return stores_; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public int getStoresCount() { return stores_.size(); } /** - * repeated 
.RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getStores(int index) { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { return stores_.get(index); } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder getStoresOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { return stores_.get(index); } - // optional .ServerName server = 6; - public static final int SERVER_FIELD_NUMBER = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; - /** - * optional .ServerName server = 6; - * - *
-     * Server who opened the region
-     * 
- */ - public boolean hasServer() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } + // required int64 bulkload_seq_num = 4; + public static final int BULKLOAD_SEQ_NUM_FIELD_NUMBER = 4; + private long bulkloadSeqNum_; /** - * optional .ServerName server = 6; - * - *
-     * Server who opened the region
-     * 
+ * required int64 bulkload_seq_num = 4; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + public boolean hasBulkloadSeqNum() { + return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional .ServerName server = 6; - * - *
-     * Server who opened the region
-     * 
+ * required int64 bulkload_seq_num = 4; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + public long getBulkloadSeqNum() { + return bulkloadSeqNum_; } private void initFields() { - eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; - tableName_ = com.google.protobuf.ByteString.EMPTY; + tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - logSequenceNumber_ = 0L; stores_ = java.util.Collections.emptyList(); - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + bulkloadSeqNum_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasEventType()) { + if (!hasTableName()) { memoizedIsInitialized = 0; return false; } - if (!hasTableName()) { + if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } - if (!hasEncodedRegionName()) { + if (!hasBulkloadSeqNum()) { + memoizedIsInitialized = 0; + return false; + } + if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } @@ -9012,12 +10328,6 @@ public final class WALProtos { return false; } } - if (hasServer()) { - if (!getServer().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } memoizedIsInitialized = 1; return true; } @@ -9026,22 +10336,16 @@ public final class WALProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, eventType_.getNumber()); + output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, encodedRegionName_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt64(4, logSequenceNumber_); + output.writeBytes(2, encodedRegionName_); } for (int i = 0; i < stores_.size(); i++) { - output.writeMessage(5, stores_.get(i)); + output.writeMessage(3, stores_.get(i)); } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(6, server_); + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeInt64(4, bulkloadSeqNum_); } getUnknownFields().writeTo(output); } @@ -9054,27 +10358,19 @@ public final class WALProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, eventType_.getNumber()); + .computeMessageSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, encodedRegionName_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, logSequenceNumber_); + .computeBytesSize(2, encodedRegionName_); } for (int i = 0; i < stores_.size(); i++) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, stores_.get(i)); + .computeMessageSize(3, stores_.get(i)); } - if (((bitField0_ & 0x00000010) == 0x00000010)) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - 
.computeMessageSize(6, server_); + .computeInt64Size(4, bulkloadSeqNum_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -9093,17 +10389,12 @@ public final class WALProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) obj; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) obj; boolean result = true; - result = result && (hasEventType() == other.hasEventType()); - if (hasEventType()) { - result = result && - (getEventType() == other.getEventType()); - } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() @@ -9114,17 +10405,12 @@ public final class WALProtos { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } - result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); - if (hasLogSequenceNumber()) { - result = result && (getLogSequenceNumber() - == other.getLogSequenceNumber()); - } result = result && getStoresList() .equals(other.getStoresList()); - result = result && (hasServer() == other.hasServer()); - if (hasServer()) { - result = result && getServer() - .equals(other.getServer()); + result = result && (hasBulkloadSeqNum() == other.hasBulkloadSeqNum()); + if (hasBulkloadSeqNum()) { + result = result && (getBulkloadSeqNum() + == other.getBulkloadSeqNum()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -9139,10 +10425,6 @@ public final class WALProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEventType()) { - hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getEventType()); - } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); @@ -9151,70 +10433,66 @@ public final class WALProtos { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } - if (hasLogSequenceNumber()) { - hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogSequenceNumber()); - } if (getStoresCount() > 0) { hash = (37 * hash) + STORES_FIELD_NUMBER; hash = (53 * hash) + getStoresList().hashCode(); } - if (hasServer()) { - hash = (37 * hash) + SERVER_FIELD_NUMBER; - hash = (53 * hash) + getServer().hashCode(); + if (hasBulkloadSeqNum()) { + hash = (37 * hash) + BULKLOAD_SEQ_NUM_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getBulkloadSeqNum()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9223,7 +10501,7 @@ public final class WALProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder 
toBuilder() { return newBuilder(this); } @@ -9235,29 +10513,29 @@ public final class WALProtos { return builder; } /** - * Protobuf type {@code RegionEventDescriptor} + * Protobuf type {@code BulkLoadDescriptor} * *
      **
-     * Special WAL entry to hold all related to a region event (open/close).
+     * Special WAL entry used for HBASE-11567 (Write bulk load events to WAL)
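+     * The descriptor captures the table name, the encoded region name, the
+     * store files added per column family, and the bulk load sequence number.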
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -9269,8 +10547,8 @@ public final class WALProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableNameFieldBuilder(); getStoresFieldBuilder(); - getServerFieldBuilder(); } } private static Builder create() { @@ -9279,26 +10557,22 @@ public final class WALProtos { public Builder clear() { super.clear(); - eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + if (tableNameBuilder_ == null) { + tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + } else { + tableNameBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - logSequenceNumber_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); + bitField0_ = (bitField0_ & ~0x00000002); if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); } else { storesBuilder_.clear(); } - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); + bulkloadSeqNum_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); return this; } @@ -9308,91 +10582,77 @@ public final class WALProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_BulkLoadDescriptor_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor build() { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor(this); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.eventType_ = eventType_; + if (tableNameBuilder_ == null) { + result.tableName_ = tableName_; + } else { + result.tableName_ = tableNameBuilder_.build(); + } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } result.encodedRegionName_ = encodedRegionName_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.logSequenceNumber_ = logSequenceNumber_; if (storesBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010)) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { stores_ = java.util.Collections.unmodifiableList(stores_); - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); } result.stores_ = stores_; } else { result.stores_ = storesBuilder_.build(); } - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000010; - } - if (serverBuilder_ == null) { - result.server_ = server_; - } else { - result.server_ = serverBuilder_.build(); + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; } + result.bulkloadSeqNum_ = bulkloadSeqNum_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this; - if (other.hasEventType()) { - setEventType(other.getEventType()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance()) return this; if (other.hasTableName()) { - setTableName(other.getTableName()); + mergeTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } - if (other.hasLogSequenceNumber()) { - setLogSequenceNumber(other.getLogSequenceNumber()); - } if (storesBuilder_ == null) { if (!other.stores_.isEmpty()) { if (stores_.isEmpty()) { stores_ = other.stores_; - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); } else { ensureStoresIsMutable(); stores_.addAll(other.stores_); @@ -9405,7 +10665,7 @@ public final class WALProtos { storesBuilder_.dispose(); storesBuilder_ = null; stores_ = other.stores_; - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); storesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getStoresFieldBuilder() : null; @@ -9414,23 +10674,27 @@ public final class WALProtos { } } } - if (other.hasServer()) { - mergeServer(other.getServer()); + if (other.hasBulkloadSeqNum()) { + setBulkloadSeqNum(other.getBulkloadSeqNum()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasEventType()) { + if (!hasTableName()) { return false; } - if (!hasTableName()) { + if (!hasEncodedRegionName()) { return false; } - if (!hasEncodedRegionName()) { + if (!hasBulkloadSeqNum()) { + + return false; + } + if (!getTableName().isInitialized()) { return false; } @@ -9440,12 +10704,6 @@ public final class WALProtos { return false; } } - if (hasServer()) { - if (!getServer().isInitialized()) { - - return false; - } - } return true; } @@ -9453,11 +10711,11 @@ public final class WALProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -9468,164 +10726,176 @@ public final class WALProtos { } private int bitField0_; - // required .RegionEventDescriptor.EventType event_type = 1; - private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + // required .TableName table_name = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** - * required .RegionEventDescriptor.EventType event_type = 1; + * required .TableName table_name = 1; */ - public boolean hasEventType() { + public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .RegionEventDescriptor.EventType event_type = 1; + * required .TableName table_name = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { - return eventType_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { + if (tableNameBuilder_ == null) { + return tableName_; + } else { + return tableNameBuilder_.getMessage(); + } } /** - * required .RegionEventDescriptor.EventType event_type = 1; + * required .TableName table_name = 1; */ - public Builder setEventType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) { - if (value == null) { - throw new NullPointerException(); + public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { + if (tableNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tableName_ = value; + onChanged(); + } else { + tableNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .TableName table_name = 1; + */ + public Builder setTableName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { + if (tableNameBuilder_ == null) { + tableName_ = builderForValue.build(); + onChanged(); + } else { + tableNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .TableName table_name = 1; + */ + public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { + if (tableNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { + tableName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); + } else { + tableName_ = value; + } + onChanged(); + } else { + tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; - eventType_ = value; - onChanged(); return this; } /** - * required .RegionEventDescriptor.EventType event_type = 1; + * required .TableName table_name = 1; */ - public Builder clearEventType() { + public Builder clearTableName() { + if (tableNameBuilder_ == null) { + tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + onChanged(); + } else { + tableNameBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000001); - eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; - onChanged(); return this; } - - // required bytes table_name = 2; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes table_name = 2; - */ - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } /** - * required bytes table_name = 2; + * required .TableName table_name = 1; */ - public com.google.protobuf.ByteString 
getTableName() { - return tableName_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTableNameFieldBuilder().getBuilder(); } /** - * required bytes table_name = 2; + * required .TableName table_name = 1; */ - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - tableName_ = value; - onChanged(); - return this; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { + if (tableNameBuilder_ != null) { + return tableNameBuilder_.getMessageOrBuilder(); + } else { + return tableName_; + } } /** - * required bytes table_name = 2; + * required .TableName table_name = 1; */ - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000002); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> + getTableNameFieldBuilder() { + if (tableNameBuilder_ == null) { + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( + tableName_, + getParentForChildren(), + isClean()); + tableName_ = null; + } + return tableNameBuilder_; } - // required bytes encoded_region_name = 3; + // required bytes encoded_region_name = 2; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** - * required bytes encoded_region_name = 3; + * required bytes encoded_region_name = 2; */ public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000004) == 0x00000004); + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required bytes encoded_region_name = 3; + * required bytes encoded_region_name = 2; */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** - * required bytes encoded_region_name = 3; + * required bytes encoded_region_name = 2; */ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } - bitField0_ |= 0x00000004; + bitField0_ |= 0x00000002; encodedRegionName_ = value; onChanged(); return this; } /** - * required bytes encoded_region_name = 3; + * required bytes encoded_region_name = 2; */ public Builder clearEncodedRegionName() { - bitField0_ = (bitField0_ & ~0x00000004); + bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } - // optional uint64 log_sequence_number = 4; - private long logSequenceNumber_ ; - /** - * optional uint64 log_sequence_number = 4; - */ - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional uint64 log_sequence_number = 4; - */ - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - /** - * optional uint64 log_sequence_number = 4; - */ - public Builder setLogSequenceNumber(long value) { - bitField0_ |= 0x00000008; - logSequenceNumber_ = value; - onChanged(); - 
return this; - } - /** - * optional uint64 log_sequence_number = 4; - */ - public Builder clearLogSequenceNumber() { - bitField0_ = (bitField0_ & ~0x00000008); - logSequenceNumber_ = 0L; - onChanged(); - return this; - } - - // repeated .RegionEventDescriptor.StoreDescriptor stores = 5; - private java.util.List stores_ = + // repeated .StoreDescriptor stores = 3; + private java.util.List stores_ = java.util.Collections.emptyList(); private void ensureStoresIsMutable() { - if (!((bitField0_ & 0x00000010) == 0x00000010)) { - stores_ = new java.util.ArrayList(stores_); - bitField0_ |= 0x00000010; + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + stores_ = new java.util.ArrayList(stores_); + bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> storesBuilder_; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public java.util.List getStoresList() { + public java.util.List getStoresList() { if (storesBuilder_ == null) { return java.util.Collections.unmodifiableList(stores_); } else { @@ -9633,7 +10903,7 @@ public final class WALProtos { } } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public int getStoresCount() { if (storesBuilder_ == null) { @@ -9643,9 +10913,9 @@ public final class WALProtos { } } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getStores(int index) { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { @@ -9653,10 +10923,10 @@ public final class WALProtos { } } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder setStores( - int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor value) { + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -9670,10 +10940,10 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder setStores( - int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.set(index, builderForValue.build()); @@ -9684,9 +10954,9 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public 
Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor value) { + public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -9700,10 +10970,10 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder addStores( - int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor value) { + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -9717,10 +10987,10 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder addStores( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(builderForValue.build()); @@ -9731,10 +11001,10 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder addStores( - int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(index, builderForValue.build()); @@ -9745,10 +11015,10 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder addAllStores( - java.lang.Iterable values) { + java.lang.Iterable values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); super.addAll(values, stores_); @@ -9759,12 +11029,12 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder clearStores() { if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { storesBuilder_.clear(); @@ -9772,7 +11042,7 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ public Builder removeStores(int index) { if (storesBuilder_ == null) { @@ -9785,16 +11055,16 @@ public final class WALProtos { return this; } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder getStoresBuilder( + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder( int index) { return getStoresFieldBuilder().getBuilder(index); } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder getStoresOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { @@ -9802,9 +11072,9 @@ public final class WALProtos { } } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public java.util.List + public java.util.List getStoresOrBuilderList() { if (storesBuilder_ != null) { return storesBuilder_.getMessageOrBuilderList(); @@ -9813,35 +11083,35 @@ public final class WALProtos { } } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder addStoresBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() { return getStoresFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder addStoresBuilder( + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder( int index) { return getStoresFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** - * repeated .RegionEventDescriptor.StoreDescriptor stores = 5; + * repeated .StoreDescriptor stores = 3; */ - public java.util.List + public java.util.List getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, - ((bitField0_ & 0x00000010) == 0x00000010), + ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); stores_ = null; @@ -9849,168 +11119,48 @@ 
public final class WALProtos { return storesBuilder_; } - // optional .ServerName server = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; - /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
- */ - public boolean hasServer() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
- */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - if (serverBuilder_ == null) { - return server_; - } else { - return serverBuilder_.getMessage(); - } - } + // required int64 bulkload_seq_num = 4; + private long bulkloadSeqNum_ ; /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
- */ - public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - server_ = value; - onChanged(); - } else { - serverBuilder_.setMessage(value); - } - bitField0_ |= 0x00000020; - return this; - } - /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
+ * required int64 bulkload_seq_num = 4; */ - public Builder setServer( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverBuilder_ == null) { - server_ = builderForValue.build(); - onChanged(); - } else { - serverBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000020; - return this; + public boolean hasBulkloadSeqNum() { + return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
+ * required int64 bulkload_seq_num = 4; */ - public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020) && - server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - server_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); - } else { - server_ = value; - } - onChanged(); - } else { - serverBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000020; - return this; + public long getBulkloadSeqNum() { + return bulkloadSeqNum_; } /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
+ * required int64 bulkload_seq_num = 4; */ - public Builder clearServer() { - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); + public Builder setBulkloadSeqNum(long value) { + bitField0_ |= 0x00000008; + bulkloadSeqNum_ = value; + onChanged(); return this; } /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
+ * required int64 bulkload_seq_num = 4; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { - bitField0_ |= 0x00000020; + public Builder clearBulkloadSeqNum() { + bitField0_ = (bitField0_ & ~0x00000008); + bulkloadSeqNum_ = 0L; onChanged(); - return getServerFieldBuilder().getBuilder(); - } - /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
- */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - if (serverBuilder_ != null) { - return serverBuilder_.getMessageOrBuilder(); - } else { - return server_; - } - } - /** - * optional .ServerName server = 6; - * - *
-       * Server who opened the region
-       * 
- */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerFieldBuilder() { - if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, - getParentForChildren(), - isClean()); - server_ = null; - } - return serverBuilder_; + return this; } - // @@protoc_insertion_point(builder_scope:RegionEventDescriptor) + // @@protoc_insertion_point(builder_scope:BulkLoadDescriptor) } static { - defaultInstance = new RegionEventDescriptor(true); + defaultInstance = new BulkLoadDescriptor(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RegionEventDescriptor) + // @@protoc_insertion_point(class_scope:BulkLoadDescriptor) } public interface WALTrailerOrBuilder @@ -10394,15 +11544,20 @@ public final class WALProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_StoreDescriptor_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StoreDescriptor_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_RegionEventDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RegionEventDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionEventDescriptor_StoreDescriptor_descriptor; + internal_static_BulkLoadDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionEventDescriptor_StoreDescriptor_fieldAccessorTable; + internal_static_BulkLoadDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_WALTrailer_descriptor; private static @@ -10417,47 +11572,50 @@ public final class WALProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\tWAL.proto\032\013HBase.proto\"\217\001\n\tWALHeader\022\027" + - "\n\017has_compression\030\001 \001(\010\022\026\n\016encryption_ke" + - "y\030\002 \001(\014\022\033\n\023has_tag_compression\030\003 \001(\010\022\027\n\017" + - "writer_cls_name\030\004 \001(\t\022\033\n\023cell_codec_cls_" + - "name\030\005 \001(\t\"\240\002\n\006WALKey\022\033\n\023encoded_region_" + - "name\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023log_se" + - "quence_number\030\003 \002(\004\022\022\n\nwrite_time\030\004 \002(\004\022" + - "\035\n\ncluster_id\030\005 \001(\0132\005.UUIDB\002\030\001\022\034\n\006scopes" + - "\030\006 \003(\0132\014.FamilyScope\022\032\n\022following_kv_cou" + - "nt\030\007 \001(\r\022\032\n\013cluster_ids\030\010 \003(\0132\005.UUID\022\022\n\n", - "nonceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001(\004\022\034\n\024orig_" + - "sequence_number\030\013 \001(\004\"=\n\013FamilyScope\022\016\n\006" + - "family\030\001 \002(\014\022\036\n\nscope_type\030\002 \002(\0162\n.Scope" + - "Type\"\276\001\n\024CompactionDescriptor\022\022\n\ntable_n" + - 
"ame\030\001 \002(\014\022\033\n\023encoded_region_name\030\002 \002(\014\022\023" + - "\n\013family_name\030\003 \002(\014\022\030\n\020compaction_input\030" + - "\004 \003(\t\022\031\n\021compaction_output\030\005 \003(\t\022\026\n\016stor" + - "e_home_dir\030\006 \002(\t\022\023\n\013region_name\030\007 \001(\014\"\353\002" + - "\n\017FlushDescriptor\022,\n\006action\030\001 \002(\0162\034.Flus" + - "hDescriptor.FlushAction\022\022\n\ntable_name\030\002 ", - "\002(\014\022\033\n\023encoded_region_name\030\003 \002(\014\022\035\n\025flus" + - "h_sequence_number\030\004 \001(\004\022<\n\rstore_flushes" + - "\030\005 \003(\0132%.FlushDescriptor.StoreFlushDescr" + - "iptor\032Y\n\024StoreFlushDescriptor\022\023\n\013family_" + - "name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\024\n\014fl" + - "ush_output\030\003 \003(\t\"A\n\013FlushAction\022\017\n\013START" + - "_FLUSH\020\000\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABORT_FLUS" + - "H\020\002\"\364\002\n\025RegionEventDescriptor\0224\n\nevent_t" + - "ype\030\001 \002(\0162 .RegionEventDescriptor.EventT" + - "ype\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_regio", - "n_name\030\003 \002(\014\022\033\n\023log_sequence_number\030\004 \001(" + - "\004\0226\n\006stores\030\005 \003(\0132&.RegionEventDescripto" + - "r.StoreDescriptor\022\033\n\006server\030\006 \001(\0132\013.Serv" + - "erName\032R\n\017StoreDescriptor\022\023\n\013family_name" + - "\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n\nstore_" + - "file\030\003 \003(\t\".\n\tEventType\022\017\n\013REGION_OPEN\020\000" + - "\022\020\n\014REGION_CLOSE\020\001\"\014\n\nWALTrailer*F\n\tScop" + - "eType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030RE" + - "PLICATION_SCOPE_GLOBAL\020\001B?\n*org.apache.h" + - "adoop.hbase.protobuf.generatedB\tWALProto", - "sH\001\210\001\000\240\001\001" + "\n\tWAL.proto\032\013HBase.proto\032\014Client.proto\"\217" + + "\001\n\tWALHeader\022\027\n\017has_compression\030\001 \001(\010\022\026\n" + + "\016encryption_key\030\002 \001(\014\022\033\n\023has_tag_compres" + + "sion\030\003 \001(\010\022\027\n\017writer_cls_name\030\004 \001(\t\022\033\n\023c" + + "ell_codec_cls_name\030\005 \001(\t\"\240\002\n\006WALKey\022\033\n\023e" + + "ncoded_region_name\030\001 \002(\014\022\022\n\ntable_name\030\002" + + " \002(\014\022\033\n\023log_sequence_number\030\003 \002(\004\022\022\n\nwri" + + "te_time\030\004 \002(\004\022\035\n\ncluster_id\030\005 \001(\0132\005.UUID" + + "B\002\030\001\022\034\n\006scopes\030\006 \003(\0132\014.FamilyScope\022\032\n\022fo" + + "llowing_kv_count\030\007 \001(\r\022\032\n\013cluster_ids\030\010 ", + "\003(\0132\005.UUID\022\022\n\nnonceGroup\030\t \001(\004\022\r\n\005nonce\030" + + "\n \001(\004\022\034\n\024orig_sequence_number\030\013 \001(\004\"=\n\013F" + + "amilyScope\022\016\n\006family\030\001 \002(\014\022\036\n\nscope_type" + + "\030\002 \002(\0162\n.ScopeType\"\276\001\n\024CompactionDescrip" + + "tor\022\022\n\ntable_name\030\001 \002(\014\022\033\n\023encoded_regio" + + "n_name\030\002 \002(\014\022\023\n\013family_name\030\003 \002(\014\022\030\n\020com" + + "paction_input\030\004 \003(\t\022\031\n\021compaction_output" + + "\030\005 \003(\t\022\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013region" + + "_name\030\007 \001(\014\"\353\002\n\017FlushDescriptor\022,\n\006actio" + + "n\030\001 \002(\0162\034.FlushDescriptor.FlushAction\022\022\n", + "\ntable_name\030\002 \002(\014\022\033\n\023encoded_region_name" + + "\030\003 
\002(\014\022\035\n\025flush_sequence_number\030\004 \001(\004\022<\n" + + "\rstore_flushes\030\005 \003(\0132%.FlushDescriptor.S" + + "toreFlushDescriptor\032Y\n\024StoreFlushDescrip" + + "tor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_home_d" + + "ir\030\002 \002(\t\022\024\n\014flush_output\030\003 \003(\t\"A\n\013FlushA" + + "ction\022\017\n\013START_FLUSH\020\000\022\020\n\014COMMIT_FLUSH\020\001" + + "\022\017\n\013ABORT_FLUSH\020\002\"R\n\017StoreDescriptor\022\023\n\013" + + "family_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(" + + "\t\022\022\n\nstore_file\030\003 \003(\t\"\212\002\n\025RegionEventDes", + "criptor\0224\n\nevent_type\030\001 \002(\0162 .RegionEven" + + "tDescriptor.EventType\022\022\n\ntable_name\030\002 \002(" + + "\014\022\033\n\023encoded_region_name\030\003 \002(\014\022\033\n\023log_se" + + "quence_number\030\004 \001(\004\022 \n\006stores\030\005 \003(\0132\020.St" + + "oreDescriptor\022\033\n\006server\030\006 \001(\0132\013.ServerNa" + + "me\".\n\tEventType\022\017\n\013REGION_OPEN\020\000\022\020\n\014REGI" + + "ON_CLOSE\020\001\"\215\001\n\022BulkLoadDescriptor\022\036\n\ntab" + + "le_name\030\001 \002(\0132\n.TableName\022\033\n\023encoded_reg" + + "ion_name\030\002 \002(\014\022 \n\006stores\030\003 \003(\0132\020.StoreDe" + + "scriptor\022\030\n\020bulkload_seq_num\030\004 \002(\003\"\014\n\nWA", + "LTrailer*F\n\tScopeType\022\033\n\027REPLICATION_SCO" + + "PE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLOBAL\020\001" + + "B?\n*org.apache.hadoop.hbase.protobuf.gen" + + "eratedB\tWALProtosH\001\210\001\000\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -10500,20 +11658,26 @@ public final class WALProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FlushDescriptor_StoreFlushDescriptor_descriptor, new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", }); - internal_static_RegionEventDescriptor_descriptor = + internal_static_StoreDescriptor_descriptor = getDescriptor().getMessageTypes().get(5); + internal_static_StoreDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StoreDescriptor_descriptor, + new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", }); + internal_static_RegionEventDescriptor_descriptor = + getDescriptor().getMessageTypes().get(6); internal_static_RegionEventDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionEventDescriptor_descriptor, new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", }); - internal_static_RegionEventDescriptor_StoreDescriptor_descriptor = - internal_static_RegionEventDescriptor_descriptor.getNestedTypes().get(0); - internal_static_RegionEventDescriptor_StoreDescriptor_fieldAccessorTable = new + internal_static_BulkLoadDescriptor_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_BulkLoadDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionEventDescriptor_StoreDescriptor_descriptor, - new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", }); + internal_static_BulkLoadDescriptor_descriptor, + new java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", }); 
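// Editorial sketch (not part of the generated file): one way a caller might build a
// BulkLoadDescriptor through this generated API and round-trip it through its parser.
// The table, region, family, and file names are hypothetical placeholders, and the
// namespace/qualifier fields of TableName are assumed from HBase.proto.
//
//   BulkLoadDescriptor desc = BulkLoadDescriptor.newBuilder()
//       .setTableName(HBaseProtos.TableName.newBuilder()
//           .setNamespace(ByteString.copyFromUtf8("default"))
//           .setQualifier(ByteString.copyFromUtf8("mytable")))
//       .setEncodedRegionName(ByteString.copyFromUtf8("abcdef0123456789"))
//       .addStores(StoreDescriptor.newBuilder()
//           .setFamilyName(ByteString.copyFromUtf8("cf"))
//           .setStoreHomeDir("cf")        // relative to the region dir
//           .addStoreFile("myhfile"))     // relative to the store dir
//       .setBulkloadSeqNum(42L)
//       .build();                         // all required fields are set, so build() succeeds
//   BulkLoadDescriptor copy = BulkLoadDescriptor.parseFrom(desc.toByteString());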
internal_static_WALTrailer_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(8); internal_static_WALTrailer_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALTrailer_descriptor, @@ -10525,6 +11689,7 @@ public final class WALProtos { .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } diff --git hbase-protocol/src/main/protobuf/WAL.proto hbase-protocol/src/main/protobuf/WAL.proto index dae92d2..04be987 100644 --- hbase-protocol/src/main/protobuf/WAL.proto +++ hbase-protocol/src/main/protobuf/WAL.proto @@ -22,6 +22,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; import "HBase.proto"; +import "Client.proto"; message WALHeader { optional bool has_compression = 1; @@ -120,6 +121,12 @@ message FlushDescriptor { repeated StoreFlushDescriptor store_flushes = 5; } +message StoreDescriptor { + required bytes family_name = 1; + required string store_home_dir = 2; //relative to region dir + repeated string store_file = 3; // relative to store dir +} + /** * Special WAL entry to hold all related to a region event (open/close). */ @@ -129,12 +136,6 @@ message RegionEventDescriptor { REGION_CLOSE = 1; } - message StoreDescriptor { - required bytes family_name = 1; - required string store_home_dir = 2; //relative to region dir - repeated string store_file = 3; // relative to store dir - } - required EventType event_type = 1; required bytes table_name = 2; required bytes encoded_region_name = 3; @@ -144,6 +145,16 @@ message RegionEventDescriptor { } /** + * Special WAL entry used for HBASE-11567 (Write bulk load events to WAL) + */ +message BulkLoadDescriptor { + required TableName table_name = 1; + required bytes encoded_region_name = 2; + repeated StoreDescriptor stores = 3; + required int64 bulkload_seq_num = 4; +} + +/** * A trailer that is appended to the end of a properly closed HLog WAL file. * If missing, this is either a legacy or a corrupted WAL file. 
*/ diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 274c1b3..74fb928 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -58,6 +58,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import com.google.protobuf.ByteString; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -68,7 +69,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompoundConfiguration; -import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -118,6 +118,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor; @@ -133,6 +134,7 @@ import org.apache.hadoop.hbase.regionserver.wal.HLogUtil; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.SnapshotManifest; +import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.ClassSize; @@ -205,7 +207,7 @@ public class HRegion implements HeapSize { // , Writable{ * This is the global default value for durability. All tables/mutations not * defining a durability or using USE_DEFAULT will default to this value. */ - private static final Durability DEFAULT_DURABLITY = Durability.SYNC_WAL; + private static final Durability DEFAULT_DURABILITY = Durability.SYNC_WAL; final AtomicBoolean closed = new AtomicBoolean(false); /* Closing can take some time; use the closing flag if there is stuff we don't @@ -635,7 +637,7 @@ public class HRegion implements HeapSize { // , Writable{ this.rowProcessorTimeout = conf.getLong( "hbase.hregion.row.processor.timeout", DEFAULT_ROW_PROCESSOR_TIMEOUT); this.durability = htd.getDurability() == Durability.USE_DEFAULT - ? DEFAULT_DURABLITY + ? 
DEFAULT_DURABILITY : htd.getDurability(); if (rsServices != null) { this.rsAccounting = this.rsServices.getRegionServerAccounting(); @@ -716,7 +718,7 @@ public class HRegion implements HeapSize { // , Writable{ } private long initializeRegionInternals(final CancelableProgressable reporter, - final MonitoredTask status) throws IOException, UnsupportedEncodingException { + final MonitoredTask status) throws IOException { if (coprocessorHost != null) { status.setStatus("Running coprocessor pre-open hook"); coprocessorHost.preOpen(); @@ -782,7 +784,7 @@ public class HRegion implements HeapSize { // , Writable{ } private long initializeRegionStores(final CancelableProgressable reporter, MonitoredTask status) - throws IOException, UnsupportedEncodingException { + throws IOException { // Load in all the HStores. long maxSeqId = -1; @@ -2058,7 +2060,7 @@ public class HRegion implements HeapSize { // , Writable{ return getScanner(scan, null); } - void prepareScanner(Scan scan) throws IOException { + void prepareScanner(Scan scan) { if(!scan.hasFamilies()) { // Adding all families to scanner for(byte[] family: this.htableDescriptor.getFamiliesKeys()){ @@ -2388,7 +2390,7 @@ public class HRegion implements HeapSize { // , Writable{ /** * Perform a batch of mutations. * It supports only Put and Delete mutations and will ignore other types passed. - * @param mutations the list of mutations + * @param batchOp the list of mutations * @return an array of OperationStatus which internally contains the * OperationStatusCode and the exceptionMessage if any. * @throws IOException @@ -2950,7 +2952,7 @@ public class HRegion implements HeapSize { // , Writable{ } } - private void doBatchMutate(Mutation mutation) throws IOException, DoNotRetryIOException { + private void doBatchMutate(Mutation mutation) throws IOException { // Currently this is only called for puts and deletes, so no nonces. OperationStatus[] batchMutate = this.batchMutate(new Mutation[] { mutation }, HConstants.NO_NONCE, HConstants.NO_NONCE); @@ -3064,9 +3066,9 @@ public class HRegion implements HeapSize { // , Writable{ * not check the families for validity. * * @param familyMap Map of kvs per family - * @param localizedWriteEntry The WriteEntry of the MVCC for this transaction. + * @param mvccNum The WriteEntry of the MVCC for this transaction. * If null, then this method internally creates a mvcc transaction. - * @param output newly added KVs into memstore + * @param memstoreCells newly added KVs into memstore * @return the additional memory usage of the memstore caused by the * new entries. */ @@ -3238,7 +3240,7 @@ public class HRegion implements HeapSize { // , Writable{ protected long replayRecoveredEditsIfAny(final Path regiondir, Map<byte[], Long> maxSeqIdInStores, final CancelableProgressable reporter, final MonitoredTask status) - throws UnsupportedEncodingException, IOException { + throws IOException { long minSeqIdForTheRegion = -1; for (Long maxSeqIdInStore : maxSeqIdInStores.values()) { if (maxSeqIdInStore < minSeqIdForTheRegion || minSeqIdForTheRegion == -1) { @@ -3709,7 +3711,17 @@ public class HRegion implements HeapSize { // , Writable{ return multipleFamilies; } - + /** + * Bulk load one or more HFiles into this region. + * + * @param familyPaths A list of pairs mapping each column family to the location of the HFile + * to load into that family's store. + * @param assignSeqId Force a flush and use its sequence id to preserve the guarantee that all + * the edits lower than the highest sequence id from all the HFiles are flushed + * on disk.
+ * @return true if successful, false if failed recoverably + * @throws IOException if failed unrecoverably. + */ public boolean bulkLoadHFiles(List<Pair<byte[], String>> familyPaths, boolean assignSeqId) throws IOException { return bulkLoadHFiles(familyPaths, assignSeqId, null); @@ -3719,14 +3731,19 @@ * Attempts to atomically load a group of hfiles. This is critical for loading * rows with multiple column families atomically. * - * @param familyPaths List of Pair<byte[] column family, String hfilePath> - * @param bulkLoadListener Internal hooks enabling massaging/preparation of a - * file about to be bulk loaded + * @param familyPaths List of Pair<byte[] column family, String hfilePath> + * @param bulkLoadListener Internal hooks enabling massaging/preparation of a + * file about to be bulk loaded + * @param assignSeqId Force a flush and use its sequence id to preserve the guarantee that all + * the edits lower than the highest sequence id from all the HFiles are flushed + * on disk. * @return true if successful, false if failed recoverably * @throws IOException if failed unrecoverably. */ public boolean bulkLoadHFiles(List<Pair<byte[], String>> familyPaths, boolean assignSeqId, - BulkLoadListener bulkLoadListener) throws IOException { + BulkLoadListener bulkLoadListener) throws IOException { + long seqId = -1; + Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR); Preconditions.checkNotNull(familyPaths); // we need writeLock for multi-family bulk load startBulkRegionOperation(hasMultipleColumnFamilies(familyPaths)); @@ -3772,7 +3789,7 @@ StringBuilder list = new StringBuilder(); for (Pair<byte[], String> p : failures) { list.append("\n").append(Bytes.toString(p.getFirst())).append(" : ") - .append(p.getSecond()); + .append(p.getSecond()); } // problem when validating LOG.warn("There was a recoverable bulk load failure likely due to a" + @@ -3780,7 +3797,6 @@ return false; } - long seqId = -1; // We need to assign a sequential ID that's in between two memstores in order to preserve // the guarantee that all the edits lower than the highest sequential ID from all the // HFiles are flushed on disk. See HBASE-10958. The sequence id returned when we flush is @@ -3804,11 +3820,19 @@ Store store = getStore(familyName); try { String finalPath = path; - if(bulkLoadListener != null) { + if (bulkLoadListener != null) { finalPath = bulkLoadListener.prepareBulkLoad(familyName, path); } store.bulkLoadHFile(finalPath, seqId); + + if (storeFiles.containsKey(familyName)) { + storeFiles.get(familyName).add(new Path(finalPath)); + } else { + List<Path> storeFileNames = new ArrayList<Path>(); + storeFileNames.add(new Path(finalPath)); + storeFiles.put(familyName, storeFileNames); + } + if (bulkLoadListener != null) { bulkLoadListener.doneBulkLoad(familyName, path); } } catch (IOException ioe) { @@ -3817,20 +3841,38 @@ // TODO Need a better story for reverting partial failures due to HDFS.
LOG.error("There was a partial failure due to IO when attempting to" + - " load " + Bytes.toString(p.getFirst()) + " : "+ p.getSecond(), ioe); - if(bulkLoadListener != null) { + " load " + Bytes.toString(p.getFirst()) + " : " + p.getSecond(), ioe); + if (bulkLoadListener != null) { try { bulkLoadListener.failedBulkLoad(familyName, path); } catch (Exception ex) { - LOG.error("Error while calling failedBulkLoad for family "+ - Bytes.toString(familyName)+" with path "+path, ex); + LOG.error("Error while calling failedBulkLoad for family " + + Bytes.toString(familyName) + " with path " + path, ex); } } throw ioe; } } + return true; } finally { + if (log != null && !storeFiles.isEmpty()) { + // write a bulk load even when not all hfiles are loaded + try { + WALProtos.BulkLoadDescriptor loadDescriptor = ProtobufUtil.toBulkLoadDescriptor(this + .getRegionInfo().getTable(), ByteStringer.wrap(this.getRegionInfo() + .getEncodedNameAsBytes()), storeFiles, seqId); + HLogUtil.writeBulkLoadMarkerAndSync(log, this.htableDescriptor, getRegionInfo(), + loadDescriptor, sequenceId); + } catch (IOException ioe) { + if (this.rsServices != null) { + // Have to abort region server because some hfiles has been loaded but we can't write + // the event into WAL + this.rsServices.abort("Failed to write bulk load event into WAL.", ioe); + } + } + } + closeBulkRegionOperation(); } } @@ -5017,8 +5059,6 @@ public class HRegion implements HeapSize { // , Writable{ doProcessRowWithTimeout( processor, now, this, null, null, timeout); processor.postProcess(this, walEdit, true); - } catch (IOException e) { - throw e; } finally { closeRegionOperation(); } @@ -5134,8 +5174,6 @@ public class HRegion implements HeapSize { // , Writable{ // 14. Run post-process hook processor.postProcess(this, walEdit, walSyncSuccessful); - } catch (IOException e) { - throw e; } finally { closeRegionOperation(); if (!mutations.isEmpty() && @@ -6136,7 +6174,7 @@ public class HRegion implements HeapSize { // , Writable{ } /** - * A mocked list implementaion - discards all updates. + * A mocked list implementation - discards all updates. */ private static final List MOCKED_LIST = new AbstractList() { @@ -6363,7 +6401,7 @@ public class HRegion implements HeapSize { // , Writable{ private HLogKey appendNoSyncNoAppend(final HLog wal, List cells) throws IOException { HLogKey key = new HLogKey(getRegionInfo().getEncodedNameAsBytes(), getRegionInfo().getTable(), HLog.NO_SEQUENCE_ID, 0, null, HConstants.NO_NONCE, HConstants.NO_NONCE); - // Call append but with an empty WALEdit. The returned seqeunce id will not be associated + // Call append but with an empty WALEdit. The returned sequence id will not be associated // with any edit and we can be sure it went in after all outstanding appends. 
wal.appendNoSync(getTableDesc(), getRegionInfo(), key, WALEdit.EMPTY_WALEDIT, this.sequenceId, false, cells); @@ -6371,7 +6409,7 @@ } /** - * Explictly sync wal + * Explicitly sync wal * @throws IOException */ public void syncWal() throws IOException { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java index ce353b6..a2bd3f3 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; +import java.util.ArrayList; import java.util.NavigableSet; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicLong; @@ -36,8 +37,10 @@ import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor; @@ -301,4 +304,40 @@ public class HLogUtil { } return trx; } -} + + /** + * Write a log marker that a bulk load has succeeded and is about to be committed. + * + * @param log The log to write into. + * @param htd A description of the table that we are bulk loading into. + * @param info A description of the region in the table that we are bulk loading into. + * @param descriptor A protocol buffers based description of the client's bulk load request. + * @param sequenceId The current sequence id in the log at the time we write the bulk load + * marker. + * @return The txid of this transaction, or the last txid if there was nothing to do. + * @throws IOException We will throw an IOException if we cannot append to the HLog.
+ */ + public static long writeBulkLoadMarkerAndSync(final HLog log, + final HTableDescriptor htd, + final HRegionInfo info, + final WALProtos.BulkLoadDescriptor descriptor, + final AtomicLong sequenceId) throws IOException { + TableName tn = info.getTable(); + HLogKey key = new HLogKey(info.getEncodedNameAsBytes(), tn); + + // Add it to the log; the 'false' argument specifies that we don't need to add it to the memstore + long trx = log.appendNoSync(htd, + info, + key, + WALEdit.createBulkLoadEvent(info, descriptor), + sequenceId, + false, + new ArrayList<KeyValue>()); + log.sync(trx); + + if (LOG.isTraceEnabled()) { + LOG.trace("Appended Bulk Load marker " + TextFormat.shortDebugString(descriptor)); + } + return trx; + } +} \ No newline at end of file diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index 889901c..549bb5c 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -82,10 +82,9 @@ public class WALCellCodec implements Codec { } /** - * Create and setup a {@link WALCellCodec} from the {@link cellCodecClsName} and - * CompressionContext, if {@link cellCodecClsName} is specified. - * Otherwise Cell Codec classname is read from {@link Configuration}. - * Fully prepares the codec for use. + * Create and setup a {@link WALCellCodec} from the cellCodecClsName and CompressionContext, if + * cellCodecClsName is specified. Otherwise the Cell codec classname is read from + * {@link Configuration}. Fully prepares the codec for use. * @param conf {@link Configuration} to read for the user-specified codec. If none is specified, * uses a {@link WALCellCodec}.
* @param compression compression the codec should use diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java index 4b38027..908c485 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor; @@ -88,6 +89,7 @@ public class WALEdit implements Writable, HeapSize { static final byte[] COMPACTION = Bytes.toBytes("HBASE::COMPACTION"); static final byte [] FLUSH = Bytes.toBytes("HBASE::FLUSH"); static final byte [] REGION_EVENT = Bytes.toBytes("HBASE::REGION_EVENT"); + public static final byte [] BULK_LOAD = Bytes.toBytes("HBASE::BULK_LOAD"); private final int VERSION_2 = -1; private final boolean isReplay; @@ -294,7 +296,7 @@ public class WALEdit implements Writable, HeapSize { } /** - * Create a compacion WALEdit + * Create a compaction WALEdit * @param c * @return A WALEdit that has c serialized as its value */ @@ -326,4 +328,33 @@ public class WALEdit implements Writable, HeapSize { } return null; } -} + + /** + * Create a bulk load WALEdit + * + * @param hri The HRegionInfo for the region in which we are bulk loading + * @param bulkLoadDescriptor The descriptor for the bulk load + * @return The WALEdit for the bulk load + */ + public static WALEdit createBulkLoadEvent(HRegionInfo hri, + WALProtos.BulkLoadDescriptor bulkLoadDescriptor) { + KeyValue kv = new KeyValue(getRowForRegion(hri), + METAFAMILY, + BULK_LOAD, + EnvironmentEdgeManager.currentTimeMillis(), + bulkLoadDescriptor.toByteArray()); + return new WALEdit().add(kv); + } + + /** + * Deserializes and returns a BulkLoadDescriptor from the passed-in Cell + * @param cell the key value + * @return deserialized BulkLoadDescriptor or null. + */ + public static WALProtos.BulkLoadDescriptor getBulkLoadDescriptor(Cell cell) throws IOException { + if (CellUtil.matchingColumn(cell, METAFAMILY, BULK_LOAD)) { + return WALProtos.BulkLoadDescriptor.parseFrom(cell.getValue()); + } + return null; + } +} \ No newline at end of file diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java new file mode 100644 index 0000000..ade9d59 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java @@ -0,0 +1,314 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.regionserver; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.io.hfile.HFileContext; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.jmock.Expectations; +import org.jmock.integration.junit4.JUnitRuleMockery; +import org.jmock.lib.concurrent.Synchroniser; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestName; +import org.mockito.ArgumentCaptor; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Random; +import java.util.concurrent.atomic.AtomicLong; + +import static java.util.Arrays.asList; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * This class attempts to unit test bulk HLog loading. 
+ */ +@Category(SmallTests.class) +public class TestBulkLoad { + + @ClassRule + public static TemporaryFolder testFolder = new TemporaryFolder(); + @Rule + public final JUnitRuleMockery context = new JUnitRuleMockery() {{ + setThreadingPolicy(new Synchroniser()); + }}; + private final HLog log = context.mock(HLog.class); + private final Configuration conf = HBaseConfiguration.create(); + private final Random random = new Random(); + private final byte[] randomBytes = new byte[100]; + private final byte[] family1 = Bytes.toBytes("family1"); + private final byte[] family2 = Bytes.toBytes("family2"); + private final Expectations callOnce; + @Rule + public TestName name = new TestName(); + + public TestBulkLoad() throws IOException { + callOnce = new Expectations() { + { + oneOf(log).appendNoSync(with(any(HTableDescriptor.class)), with(any(HRegionInfo.class)), + with(any(HLogKey.class)), with(bulkLogWalEditType(WALEdit.BULK_LOAD)), + with(any(AtomicLong.class)), with(any(boolean.class)), with(any(List.class))); + will(returnValue(0L)); + oneOf(log).sync(with(any(long.class))); + } + }; + } + + @Before + public void before() throws IOException { + random.nextBytes(randomBytes); + } + + @Test + public void verifyBulkLoadEvent() throws IOException { + TableName tableName = TableName.valueOf("test", "test"); + List<Pair<byte[], String>> familyPaths = withFamilyPathsFor(family1); + byte[] familyName = familyPaths.get(0).getFirst(); + String storeFileName = familyPaths.get(0).getSecond(); + storeFileName = (new Path(storeFileName)).getName(); + List<String> storeFileNames = new ArrayList<String>(); + storeFileNames.add(storeFileName); + final Matcher<WALEdit> bulkEventMatcher = bulkLogWalEdit(WALEdit.BULK_LOAD, + tableName.toBytes(), familyName, storeFileNames); + Expectations expectation = new Expectations() { + { + oneOf(log).appendNoSync(with(any(HTableDescriptor.class)), with(any(HRegionInfo.class)), + with(any(HLogKey.class)), with(bulkEventMatcher), + with(any(AtomicLong.class)), with(any(boolean.class)), with(any(List.class))); + will(returnValue(0L)); + oneOf(log).sync(with(any(long.class))); + } + }; + context.checking(expectation); + testRegionWithFamiliesAndSpecifiedTableName(tableName, family1) + .bulkLoadHFiles(familyPaths, false); + } + + @Test + public void bulkHLogShouldThrowNoErrorAndWriteMarkerWithBlankInput() throws IOException { + testRegionWithFamilies(family1).bulkLoadHFiles(new ArrayList<Pair<byte[], String>>(), false); + } + + @Test + public void shouldBulkLoadSingleFamilyHLog() throws IOException { + context.checking(callOnce); + testRegionWithFamilies(family1).bulkLoadHFiles(withFamilyPathsFor(family1), false); + } + + @Test + public void shouldBulkLoadManyFamilyHLog() throws IOException { + context.checking(callOnce); + testRegionWithFamilies(family1, family2).bulkLoadHFiles(withFamilyPathsFor(family1, family2), + false); + } + + @Test + public void shouldBulkLoadManyFamilyHLogEvenWhenTableNameNamespaceSpecified() throws IOException { + context.checking(callOnce); + TableName tableName = TableName.valueOf("test", "test"); + testRegionWithFamiliesAndSpecifiedTableName(tableName, family1, family2) + .bulkLoadHFiles(withFamilyPathsFor(family1, family2), false); + } + + @Test(expected = DoNotRetryIOException.class) + public void shouldCrashIfBulkLoadFamiliesNotInTable() throws IOException { + testRegionWithFamilies(family1).bulkLoadHFiles(withFamilyPathsFor(family1, family2), false); + } + + @Test(expected = DoNotRetryIOException.class) + public void bulkHLogShouldThrowErrorWhenFamilySpecifiedAndHFileExistsButNotInTableDescriptor() + throws 
IOException { + testRegionWithFamilies().bulkLoadHFiles(withFamilyPathsFor(family1), false); + } + + @Test(expected = DoNotRetryIOException.class) + public void shouldThrowErrorIfBadFamilySpecifiedAsFamilyPath() throws IOException { + testRegionWithFamilies() + .bulkLoadHFiles(asList(withInvalidColumnFamilyButProperHFileLocation(family1)), + false); + } + + @Test(expected = FileNotFoundException.class) + public void shouldThrowErrorIfHFileDoesNotExist() throws IOException { + List<Pair<byte[], String>> list = asList(withMissingHFileForFamily(family1)); + testRegionWithFamilies(family1).bulkLoadHFiles(list, false); + } + + private Pair<byte[], String> withMissingHFileForFamily(byte[] family) { + return new Pair<byte[], String>(family, "/tmp/does_not_exist"); + } + + private Pair<byte[], String> withInvalidColumnFamilyButProperHFileLocation(byte[] family) + throws IOException { + createHFileForFamilies(family); + return new Pair<byte[], String>(new byte[]{0x00, 0x01, 0x02}, "/tmp/does_not_exist"); + } + + + private HRegion testRegionWithFamiliesAndSpecifiedTableName(TableName tableName, + byte[]... families) + throws IOException { + HRegionInfo hRegionInfo = new HRegionInfo(tableName); + HTableDescriptor hTableDescriptor = new HTableDescriptor(tableName); + for (byte[] family : families) { + hTableDescriptor.addFamily(new HColumnDescriptor(family)); + } + + // TODO We need a way to do this without creating files + return HRegion.createHRegion(hRegionInfo, + new Path(testFolder.newFolder().toURI()), + conf, + hTableDescriptor, + log); + + } + + private HRegion testRegionWithFamilies(byte[]... families) throws IOException { + TableName tableName = TableName.valueOf(name.getMethodName()); + return testRegionWithFamiliesAndSpecifiedTableName(tableName, families); + } + + private List<Pair<byte[], String>> getBlankFamilyPaths() { + return new ArrayList<Pair<byte[], String>>(); + } + + private List<Pair<byte[], String>> withFamilyPathsFor(byte[]... 
families) throws IOException { + List<Pair<byte[], String>> familyPaths = getBlankFamilyPaths(); + for (byte[] family : families) { + familyPaths.add(new Pair<byte[], String>(family, createHFileForFamilies(family))); + } + return familyPaths; + } + + private String createHFileForFamilies(byte[] family) throws IOException { + HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(conf); + // TODO We need a way to do this without creating files + File hFileLocation = testFolder.newFile(); + hFileFactory.withOutputStream(new FSDataOutputStream(new FileOutputStream(hFileLocation))); + hFileFactory.withFileContext(new HFileContext()); + HFile.Writer writer = hFileFactory.create(); + + writer.append(new KeyValue(CellUtil.createCell(randomBytes, + family, + randomBytes, + 0L, + KeyValue.Type.Put.getCode(), + randomBytes))); + writer.close(); + return hFileLocation.getAbsoluteFile().getAbsolutePath(); + } + + private static Matcher<WALEdit> bulkLogWalEditType(byte[] typeBytes) { + return new WalMatcher(typeBytes); + } + + private static Matcher<WALEdit> bulkLogWalEdit(byte[] typeBytes, byte[] tableName, + byte[] familyName, List<String> storeFileNames) { + return new WalMatcher(typeBytes, tableName, familyName, storeFileNames); + } + + private static class WalMatcher extends TypeSafeMatcher<WALEdit> { + private final byte[] typeBytes; + private final byte[] tableName; + private final byte[] familyName; + private final List<String> storeFileNames; + + public WalMatcher(byte[] typeBytes) { + this(typeBytes, null, null, null); + } + + public WalMatcher(byte[] typeBytes, byte[] tableName, byte[] familyName, + List<String> storeFileNames) { + this.typeBytes = typeBytes; + this.tableName = tableName; + this.familyName = familyName; + this.storeFileNames = storeFileNames; + } + + @Override + protected boolean matchesSafely(WALEdit item) { + assertTrue(Arrays.equals(item.getKeyValues().get(0).getQualifier(), typeBytes)); + BulkLoadDescriptor desc; + try { + desc = WALEdit.getBulkLoadDescriptor(item.getKeyValues().get(0)); + } catch (IOException e) { + return false; + } + assertNotNull(desc); + + if (tableName != null) { + assertTrue(Bytes.equals(ProtobufUtil.toTableName(desc.getTableName()).getName(), + tableName)); + } + + if (storeFileNames != null) { + int index = 0; + StoreDescriptor store = desc.getStores(0); + assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), familyName)); + assertTrue(Bytes.equals(Bytes.toBytes(store.getStoreHomeDir()), familyName)); + assertEquals(storeFileNames.size(), store.getStoreFileCount()); + for (String storeFile : store.getStoreFileList()) { + assertTrue(storeFile.equals(storeFileNames.get(index++))); + } + } + + return true; + } + + @Override + public void describeTo(Description description) { + + } + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 9fa430f..45a9544 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -120,7 +120,7 @@ import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor; -import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor; +import 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor; import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; import org.apache.hadoop.hbase.regionserver.HRegion.RowLock; import org.apache.hadoop.hbase.regionserver.TestStore.FaultyFileSystem; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index 26570d1..0e9b680 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -17,11 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.atomic.AtomicLong; - +import com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -55,12 +51,23 @@ import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.regionserver.wal.TestWALActionsListener; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; /** * Tests bulk loading of HFiles and shows the atomicity or lack of atomicity of @@ -288,7 +295,11 @@ public class TestHRegionServerBulkLoad { UTIL.startMiniCluster(1); try { + HLog log = UTIL.getHBaseCluster().getRegionServer(0).getWAL(); + FindBulkHBaseListener listener = new FindBulkHBaseListener(); + log.registerWALActionsListener(listener); runAtomicBulkloadTest(TABLE_NAME, millisToRun, numScanners); + assertThat(listener.isFound(), is(true)); } finally { UTIL.shutdownMiniCluster(); } @@ -344,5 +355,24 @@ public class TestHRegionServerBulkLoad { UTIL = new HBaseTestingUtility(c); } + static class FindBulkHBaseListener extends TestWALActionsListener.DummyWALActionsListener { + private boolean found = false; + + @Override + public void visitLogEntryBeforeWrite(HTableDescriptor htd, HLogKey logKey, WALEdit logEdit) { + for (KeyValue kv : logEdit.getKeyValues()) { + for (Map.Entry<String, Object> entry : kv.toStringMap().entrySet()) { + if (entry.getValue().equals(Bytes.toString(WALEdit.BULK_LOAD))) { + found = true; + } + } + } + } + + public boolean isFound() { + return found; + } + } } + diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java index 0a31d9f..6346c0d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java +++ 
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java @@ -125,7 +125,7 @@ public class TestWALActionsListener { /** * Just counts when methods are called */ - static class DummyWALActionsListener implements WALActionsListener { + public static class DummyWALActionsListener implements WALActionsListener { public int preLogRollCounter = 0; public int postLogRollCounter = 0; public int closedCount = 0; diff --git pom.xml pom.xml index 84f279d..98aea77 100644 --- pom.xml +++ pom.xml @@ -932,6 +932,7 @@ 6.1.26 6.1.14 1.9 + <jmock-junit4.version>2.6.0</jmock-junit4.version> 1.6.8 4.11 3.0.4 @@ -1387,6 +1388,18 @@ <artifactId>disruptor</artifactId> <version>${disruptor.version}</version> + <dependency> + <groupId>org.jmock</groupId> + <artifactId>jmock-junit4</artifactId> + <version>${jmock-junit4.version}</version> + <scope>test</scope> + <exclusions> + <exclusion> + <artifactId>junit-dep</artifactId> + <groupId>junit</groupId> + </exclusion> + </exclusions> + </dependency> @@ -1410,6 +1423,10 @@ <groupId>org.mockito</groupId> <artifactId>mockito-all</artifactId> + <dependency> + <groupId>org.jmock</groupId> + <artifactId>jmock-junit4</artifactId> + </dependency>
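For context beyond the patch itself, here is a minimal sketch of how a downstream consumer of the WAL (for example, a replay or replication hook) might recognize and decode the BULK_LOAD marker written above. The BulkLoadMarkerReader class and its processEdit entry point are hypothetical illustration only; the WALEdit.getBulkLoadDescriptor call, the BulkLoadDescriptor/StoreDescriptor accessors, and ProtobufUtil.toTableName are the APIs added or exercised by this change.

// Hypothetical consumer sketch (not part of this patch): decodes the
// HBASE::BULK_LOAD marker that bulkLoadHFiles now appends to the WAL.
import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

public class BulkLoadMarkerReader {
  /** Hypothetical hook: inspects one WALEdit and prints any bulk load it describes. */
  public void processEdit(WALEdit edit) throws IOException {
    for (KeyValue kv : edit.getKeyValues()) {
      // Returns null unless the cell is METAFAMILY with the HBASE::BULK_LOAD qualifier.
      BulkLoadDescriptor desc = WALEdit.getBulkLoadDescriptor(kv);
      if (desc == null) {
        continue; // an ordinary edit, not a bulk load marker
      }
      System.out.println("Bulk load into table "
          + ProtobufUtil.toTableName(desc.getTableName())
          + ", bulkloadSeqNum=" + desc.getBulkloadSeqNum());
      for (StoreDescriptor store : desc.getStoresList()) {
        for (String storeFile : store.getStoreFileList()) {
          // store_home_dir is relative to the region dir; store_file to the store dir.
          System.out.println("  " + store.getStoreHomeDir() + "/" + storeFile);
        }
      }
    }
  }
}

The point of the marker is that it carries the table, the encoded region name, the bulk load sequence number, and every committed store file name, so a WAL consumer along these lines can act on a bulk load without scanning the source region's file system.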