commit f1722e6c9af6cec089257fbcefc5f0382a565814 Author: Enis Soztutar Date: Sat Aug 6 00:20:52 2016 -0700 HBASE-16478 Rename WALKey in PB to WALEdit diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java index 9deba2e..18e7d7ad 100644 --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java @@ -15947,19 +15947,19 @@ public final class AdminProtos { public interface WALEntryOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.WALKey key = 1; + // required .hbase.pb.WALEdit edit = 1; /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - boolean hasKey(); + boolean hasEdit(); /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit getEdit(); /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder getEditOrBuilder(); // repeated bytes key_value_bytes = 2; /** @@ -16067,14 +16067,14 @@ public final class AdminProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = key_.toBuilder(); + subBuilder = edit_.toBuilder(); } - key_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.PARSER, extensionRegistry); + edit_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.PARSER, extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom(key_); - key_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(edit_); + edit_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; @@ -16135,26 +16135,26 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.WALKey key = 1; - public static final int KEY_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey key_; + // required .hbase.pb.WALEdit edit = 1; + public static final int EDIT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit edit_; /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public boolean hasKey() { + public boolean hasEdit() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey() { - return key_; + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit getEdit() { + return edit_; } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder() { - return key_; + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder getEditOrBuilder() { + return edit_; } // repeated bytes key_value_bytes = 2; @@ -16223,7 +16223,7 @@ public final class 
AdminProtos { } private void initFields() { - key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance(); keyValueBytes_ = java.util.Collections.emptyList(); associatedCellCount_ = 0; } @@ -16232,11 +16232,11 @@ public final class AdminProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasKey()) { + if (!hasEdit()) { memoizedIsInitialized = 0; return false; } - if (!getKey().isInitialized()) { + if (!getEdit().isInitialized()) { memoizedIsInitialized = 0; return false; } @@ -16248,7 +16248,7 @@ public final class AdminProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, key_); + output.writeMessage(1, edit_); } for (int i = 0; i < keyValueBytes_.size(); i++) { output.writeBytes(2, keyValueBytes_.get(i)); @@ -16267,7 +16267,7 @@ public final class AdminProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, key_); + .computeMessageSize(1, edit_); } { int dataSize = 0; @@ -16305,10 +16305,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj; boolean result = true; - result = result && (hasKey() == other.hasKey()); - if (hasKey()) { - result = result && getKey() - .equals(other.getKey()); + result = result && (hasEdit() == other.hasEdit()); + if (hasEdit()) { + result = result && getEdit() + .equals(other.getEdit()); } result = result && getKeyValueBytesList() .equals(other.getKeyValueBytesList()); @@ -16330,9 +16330,9 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasKey()) { - hash = (37 * hash) + KEY_FIELD_NUMBER; - hash = (53 * hash) + getKey().hashCode(); + if (hasEdit()) { + hash = (37 * hash) + EDIT_FIELD_NUMBER; + hash = (53 * hash) + getEdit().hashCode(); } if (getKeyValueBytesCount() > 0) { hash = (37 * hash) + KEY_VALUE_BYTES_FIELD_NUMBER; @@ -16447,7 +16447,7 @@ public final class AdminProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getKeyFieldBuilder(); + getEditFieldBuilder(); } } private static Builder create() { @@ -16456,10 +16456,10 @@ public final class AdminProtos { public Builder clear() { super.clear(); - if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance(); } else { - keyBuilder_.clear(); + editBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); keyValueBytes_ = java.util.Collections.emptyList(); @@ -16497,10 +16497,10 @@ public final class AdminProtos { if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - if (keyBuilder_ == null) { - result.key_ = key_; + if (editBuilder_ == null) { + result.edit_ = edit_; } else { - result.key_ = keyBuilder_.build(); + result.edit_ = editBuilder_.build(); } if (((bitField0_ & 0x00000002) == 0x00000002)) { keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); @@ -16527,8 +16527,8 @@ public final class AdminProtos { public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this; - if (other.hasKey()) { - mergeKey(other.getKey()); + if (other.hasEdit()) { + mergeEdit(other.getEdit()); } if (!other.keyValueBytes_.isEmpty()) { if (keyValueBytes_.isEmpty()) { @@ -16548,11 +16548,11 @@ public final class AdminProtos { } public final boolean isInitialized() { - if (!hasKey()) { + if (!hasEdit()) { return false; } - if (!getKey().isInitialized()) { + if (!getEdit().isInitialized()) { return false; } @@ -16578,121 +16578,121 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.WALKey key = 1; - private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + // required .hbase.pb.WALEdit edit = 1; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder> keyBuilder_; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder> editBuilder_; /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public boolean hasKey() { + public boolean hasEdit() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey() { - if (keyBuilder_ == null) { - return key_; + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit getEdit() { + if (editBuilder_ == null) { + return edit_; } else { - return keyBuilder_.getMessage(); + return editBuilder_.getMessage(); } } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey value) { - if (keyBuilder_ == null) { + public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit value) { + if (editBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - key_ = value; + edit_ = value; onChanged(); } else { - keyBuilder_.setMessage(value); + editBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public Builder setKey( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builderForValue) { - if (keyBuilder_ == null) { - key_ = builderForValue.build(); + public Builder setEdit( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder builderForValue) { + if (editBuilder_ == null) { + edit_ = builderForValue.build(); onChanged(); } else { - keyBuilder_.setMessage(builderForValue.build()); + editBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey 
value) { - if (keyBuilder_ == null) { + public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit value) { + if (editBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - key_ != org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) { - key_ = - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder(key_).mergeFrom(value).buildPartial(); + edit_ != org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance()) { + edit_ = + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); } else { - key_ = value; + edit_ = value; } onChanged(); } else { - keyBuilder_.mergeFrom(value); + editBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public Builder clearKey() { - if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + public Builder clearEdit() { + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance(); onChanged(); } else { - keyBuilder_.clear(); + editBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getKeyBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder getEditBuilder() { bitField0_ |= 0x00000001; onChanged(); - return getKeyFieldBuilder().getBuilder(); + return getEditFieldBuilder().getBuilder(); } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder() { - if (keyBuilder_ != null) { - return keyBuilder_.getMessageOrBuilder(); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder getEditOrBuilder() { + if (editBuilder_ != null) { + return editBuilder_.getMessageOrBuilder(); } else { - return key_; + return edit_; } } /** - * required .hbase.pb.WALKey key = 1; + * required .hbase.pb.WALEdit edit = 1; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder> - getKeyFieldBuilder() { - if (keyBuilder_ == null) { - keyBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder>( - key_, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder> + getEditFieldBuilder() { + if (editBuilder_ == null) { + editBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder>( + edit_, getParentForChildren(), isClean()); - key_ = null; + edit_ = null; } - return keyBuilder_; + return editBuilder_; } // repeated 
bytes key_value_bytes = 2; @@ -24034,61 +24034,61 @@ public final class AdminProtos { "\002(\0132\031.hbase.pb.RegionSpecifier\022+\n\010region" + "_b\030\002 \002(\0132\031.hbase.pb.RegionSpecifier\022\027\n\010f" + "orcible\030\003 \001(\010:\005false\022\032\n\022master_system_ti" + - "me\030\004 \001(\004\"\026\n\024MergeRegionsResponse\"a\n\010WALE" + - "ntry\022\035\n\003key\030\001 \002(\0132\020.hbase.pb.WALKey\022\027\n\017k" + - "ey_value_bytes\030\002 \003(\014\022\035\n\025associated_cell_", - "count\030\003 \001(\005\"\242\001\n\030ReplicateWALEntryRequest" + - "\022!\n\005entry\030\001 \003(\0132\022.hbase.pb.WALEntry\022\034\n\024r" + - "eplicationClusterId\030\002 \001(\t\022\"\n\032sourceBaseN" + - "amespaceDirPath\030\003 \001(\t\022!\n\031sourceHFileArch" + - "iveDirPath\030\004 \001(\t\"\033\n\031ReplicateWALEntryRes" + - "ponse\"\026\n\024RollWALWriterRequest\"0\n\025RollWAL" + - "WriterResponse\022\027\n\017region_to_flush\030\001 \003(\014\"" + - "#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t\"\024\n\022" + - "StopServerResponse\"\026\n\024GetServerInfoReque" + - "st\"K\n\nServerInfo\022)\n\013server_name\030\001 \002(\0132\024.", - "hbase.pb.ServerName\022\022\n\nwebui_port\030\002 \001(\r\"" + - "B\n\025GetServerInfoResponse\022)\n\013server_info\030" + - "\001 \002(\0132\024.hbase.pb.ServerInfo\"\034\n\032UpdateCon" + - "figurationRequest\"\035\n\033UpdateConfiguration" + - "Response2\207\013\n\014AdminService\022P\n\rGetRegionIn" + - "fo\022\036.hbase.pb.GetRegionInfoRequest\032\037.hba" + - "se.pb.GetRegionInfoResponse\022M\n\014GetStoreF" + - "ile\022\035.hbase.pb.GetStoreFileRequest\032\036.hba" + - "se.pb.GetStoreFileResponse\022V\n\017GetOnlineR" + - "egion\022 .hbase.pb.GetOnlineRegionRequest\032", - "!.hbase.pb.GetOnlineRegionResponse\022G\n\nOp" + - "enRegion\022\033.hbase.pb.OpenRegionRequest\032\034." + - "hbase.pb.OpenRegionResponse\022M\n\014WarmupReg" + - "ion\022\035.hbase.pb.WarmupRegionRequest\032\036.hba" + - "se.pb.WarmupRegionResponse\022J\n\013CloseRegio" + - "n\022\034.hbase.pb.CloseRegionRequest\032\035.hbase." + - "pb.CloseRegionResponse\022J\n\013FlushRegion\022\034." + - "hbase.pb.FlushRegionRequest\032\035.hbase.pb.F" + - "lushRegionResponse\022J\n\013SplitRegion\022\034.hbas" + - "e.pb.SplitRegionRequest\032\035.hbase.pb.Split", - "RegionResponse\022P\n\rCompactRegion\022\036.hbase." 
+ - "pb.CompactRegionRequest\032\037.hbase.pb.Compa" + - "ctRegionResponse\022M\n\014MergeRegions\022\035.hbase" + - ".pb.MergeRegionsRequest\032\036.hbase.pb.Merge" + - "RegionsResponse\022\\\n\021ReplicateWALEntry\022\".h" + - "base.pb.ReplicateWALEntryRequest\032#.hbase" + - ".pb.ReplicateWALEntryResponse\022Q\n\006Replay\022" + - "\".hbase.pb.ReplicateWALEntryRequest\032#.hb" + - "ase.pb.ReplicateWALEntryResponse\022P\n\rRoll" + - "WALWriter\022\036.hbase.pb.RollWALWriterReques", - "t\032\037.hbase.pb.RollWALWriterResponse\022P\n\rGe" + - "tServerInfo\022\036.hbase.pb.GetServerInfoRequ" + - "est\032\037.hbase.pb.GetServerInfoResponse\022G\n\n" + - "StopServer\022\033.hbase.pb.StopServerRequest\032" + - "\034.hbase.pb.StopServerResponse\022_\n\022UpdateF" + - "avoredNodes\022#.hbase.pb.UpdateFavoredNode" + - "sRequest\032$.hbase.pb.UpdateFavoredNodesRe" + - "sponse\022b\n\023UpdateConfiguration\022$.hbase.pb" + - ".UpdateConfigurationRequest\032%.hbase.pb.U" + - "pdateConfigurationResponseBA\n*org.apache", - ".hadoop.hbase.protobuf.generatedB\013AdminP" + - "rotosH\001\210\001\001\240\001\001" + "me\030\004 \001(\004\"\026\n\024MergeRegionsResponse\"c\n\010WALE" + + "ntry\022\037\n\004edit\030\001 \002(\0132\021.hbase.pb.WALEdit\022\027\n" + + "\017key_value_bytes\030\002 \003(\014\022\035\n\025associated_cel", + "l_count\030\003 \001(\005\"\242\001\n\030ReplicateWALEntryReque" + + "st\022!\n\005entry\030\001 \003(\0132\022.hbase.pb.WALEntry\022\034\n" + + "\024replicationClusterId\030\002 \001(\t\022\"\n\032sourceBas" + + "eNamespaceDirPath\030\003 \001(\t\022!\n\031sourceHFileAr" + + "chiveDirPath\030\004 \001(\t\"\033\n\031ReplicateWALEntryR" + + "esponse\"\026\n\024RollWALWriterRequest\"0\n\025RollW" + + "ALWriterResponse\022\027\n\017region_to_flush\030\001 \003(" + + "\014\"#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t\"\024" + + "\n\022StopServerResponse\"\026\n\024GetServerInfoReq" + + "uest\"K\n\nServerInfo\022)\n\013server_name\030\001 \002(\0132", + "\024.hbase.pb.ServerName\022\022\n\nwebui_port\030\002 \001(" + + "\r\"B\n\025GetServerInfoResponse\022)\n\013server_inf" + + "o\030\001 \002(\0132\024.hbase.pb.ServerInfo\"\034\n\032UpdateC" + + "onfigurationRequest\"\035\n\033UpdateConfigurati" + + "onResponse2\207\013\n\014AdminService\022P\n\rGetRegion" + + "Info\022\036.hbase.pb.GetRegionInfoRequest\032\037.h" + + "base.pb.GetRegionInfoResponse\022M\n\014GetStor" + + "eFile\022\035.hbase.pb.GetStoreFileRequest\032\036.h" + + "base.pb.GetStoreFileResponse\022V\n\017GetOnlin" + + "eRegion\022 .hbase.pb.GetOnlineRegionReques", + "t\032!.hbase.pb.GetOnlineRegionResponse\022G\n\n" + + "OpenRegion\022\033.hbase.pb.OpenRegionRequest\032" + + "\034.hbase.pb.OpenRegionResponse\022M\n\014WarmupR" + + "egion\022\035.hbase.pb.WarmupRegionRequest\032\036.h" + + "base.pb.WarmupRegionResponse\022J\n\013CloseReg" + + "ion\022\034.hbase.pb.CloseRegionRequest\032\035.hbas" + + "e.pb.CloseRegionResponse\022J\n\013FlushRegion\022" + + "\034.hbase.pb.FlushRegionRequest\032\035.hbase.pb" + + ".FlushRegionResponse\022J\n\013SplitRegion\022\034.hb" + + "ase.pb.SplitRegionRequest\032\035.hbase.pb.Spl", + "itRegionResponse\022P\n\rCompactRegion\022\036.hbas" + + "e.pb.CompactRegionRequest\032\037.hbase.pb.Com" + + "pactRegionResponse\022M\n\014MergeRegions\022\035.hba" + + "se.pb.MergeRegionsRequest\032\036.hbase.pb.Mer" + + "geRegionsResponse\022\\\n\021ReplicateWALEntry\022\"" + + ".hbase.pb.ReplicateWALEntryRequest\032#.hba" + + "se.pb.ReplicateWALEntryResponse\022Q\n\006Repla" + + 
"y\022\".hbase.pb.ReplicateWALEntryRequest\032#." + + "hbase.pb.ReplicateWALEntryResponse\022P\n\rRo" + + "llWALWriter\022\036.hbase.pb.RollWALWriterRequ", + "est\032\037.hbase.pb.RollWALWriterResponse\022P\n\r" + + "GetServerInfo\022\036.hbase.pb.GetServerInfoRe" + + "quest\032\037.hbase.pb.GetServerInfoResponse\022G" + + "\n\nStopServer\022\033.hbase.pb.StopServerReques" + + "t\032\034.hbase.pb.StopServerResponse\022_\n\022Updat" + + "eFavoredNodes\022#.hbase.pb.UpdateFavoredNo" + + "desRequest\032$.hbase.pb.UpdateFavoredNodes" + + "Response\022b\n\023UpdateConfiguration\022$.hbase." + + "pb.UpdateConfigurationRequest\032%.hbase.pb" + + ".UpdateConfigurationResponseBA\n*org.apac", + "he.hadoop.hbase.protobuf.generatedB\013Admi" + + "nProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -24244,7 +24244,7 @@ public final class AdminProtos { internal_static_hbase_pb_WALEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_WALEntry_descriptor, - new java.lang.String[] { "Key", "KeyValueBytes", "AssociatedCellCount", }); + new java.lang.String[] { "Edit", "KeyValueBytes", "AssociatedCellCount", }); internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable = new diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java index a675b12..c3f2288 100644 --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java @@ -1046,7 +1046,7 @@ public final class WALProtos { // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader) } - public interface WALKeyOrBuilder + public interface WALEditOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes encoded_region_name = 1; @@ -1248,30 +1248,29 @@ public final class WALProtos { long getOrigSequenceNumber(); } /** - * Protobuf type {@code hbase.pb.WALKey} + * Protobuf type {@code hbase.pb.WALEdit} * *
    *
-   * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
-   * for some KVs
+   * Protocol buffer version of WALEdit
    * 
*/ - public static final class WALKey extends + public static final class WALEdit extends com.google.protobuf.GeneratedMessage - implements WALKeyOrBuilder { - // Use WALKey.newBuilder() to construct. - private WALKey(com.google.protobuf.GeneratedMessage.Builder builder) { + implements WALEditOrBuilder { + // Use WALEdit.newBuilder() to construct. + private WALEdit(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private WALEdit(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final WALKey defaultInstance; - public static WALKey getDefaultInstance() { + private static final WALEdit defaultInstance; + public static WALEdit getDefaultInstance() { return defaultInstance; } - public WALKey getDefaultInstanceForType() { + public WALEdit getDefaultInstanceForType() { return defaultInstance; } @@ -1281,7 +1280,7 @@ public final class WALProtos { getUnknownFields() { return this.unknownFields; } - private WALKey( + private WALEdit( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -1393,28 +1392,28 @@ public final class WALProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALEdit_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALEdit_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WALKey parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public WALEdit parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new WALKey(input, extensionRegistry); + return new WALEdit(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -1857,10 +1856,10 @@ public final class WALProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) obj; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit other = 
(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit) obj; boolean result = true; result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); @@ -1974,53 +1973,53 @@ public final class WALProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2029,7 +2028,7 @@ public final class WALProtos { 
public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2041,30 +2040,29 @@ public final class WALProtos { return builder; } /** - * Protobuf type {@code hbase.pb.WALKey} + * Protobuf type {@code hbase.pb.WALEdit} * *
      *
-     * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
-     * for some KVs
+     * Protocol buffer version of WALEdit
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEditOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALEdit_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALEdit_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2130,23 +2128,23 @@ public final class WALProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALEdit_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey build() { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this); + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -2213,16 +2211,16 @@ public final class WALProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)other); + if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } @@ -2348,11 +2346,11 @@ public final class WALProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -3410,15 +3408,15 @@ public final class WALProtos { return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.WALKey) + // @@protoc_insertion_point(builder_scope:hbase.pb.WALEdit) } static { - defaultInstance = new WALKey(true); + defaultInstance = new WALEdit(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.WALKey) + // @@protoc_insertion_point(class_scope:hbase.pb.WALEdit) } public interface FamilyScopeOrBuilder @@ -11925,10 +11923,10 @@ public final class WALProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_WALHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_WALKey_descriptor; + internal_static_hbase_pb_WALEdit_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_WALKey_fieldAccessorTable; + internal_static_hbase_pb_WALEdit_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FamilyScope_descriptor; private static @@ -11982,52 +11980,52 @@ public final class WALProtos { "ALHeader\022\027\n\017has_compression\030\001 \001(\010\022\026\n\016enc" + "ryption_key\030\002 \001(\014\022\033\n\023has_tag_compression" + "\030\003 \001(\010\022\027\n\017writer_cls_name\030\004 \001(\t\022\033\n\023cell_" + - "codec_cls_name\030\005 \001(\t\"\273\002\n\006WALKey\022\033\n\023encod" + - "ed_region_name\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(\014" + - "\022\033\n\023log_sequence_number\030\003 \002(\004\022\022\n\nwrite_t" + - "ime\030\004 \002(\004\022&\n\ncluster_id\030\005 \001(\0132\016.hbase.pb" + - ".UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132\025.hbase.pb.Fam" + - "ilyScope\022\032\n\022following_kv_count\030\007 \001(\r\022#\n\013", - "cluster_ids\030\010 \003(\0132\016.hbase.pb.UUID\022\022\n\nnon" + - "ceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001(\004\022\034\n\024orig_seq" + - "uence_number\030\013 \001(\004\"F\n\013FamilyScope\022\016\n\006fam" + - "ily\030\001 
\002(\014\022\'\n\nscope_type\030\002 \002(\0162\023.hbase.pb" + - ".ScopeType\"\276\001\n\024CompactionDescriptor\022\022\n\nt" + - "able_name\030\001 \002(\014\022\033\n\023encoded_region_name\030\002" + - " \002(\014\022\023\n\013family_name\030\003 \002(\014\022\030\n\020compaction_" + - "input\030\004 \003(\t\022\031\n\021compaction_output\030\005 \003(\t\022\026" + - "\n\016store_home_dir\030\006 \002(\t\022\023\n\013region_name\030\007 " + - "\001(\014\"\244\003\n\017FlushDescriptor\0225\n\006action\030\001 \002(\0162", - "%.hbase.pb.FlushDescriptor.FlushAction\022\022" + - "\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_region_nam" + - "e\030\003 \002(\014\022\035\n\025flush_sequence_number\030\004 \001(\004\022E" + - "\n\rstore_flushes\030\005 \003(\0132..hbase.pb.FlushDe" + - "scriptor.StoreFlushDescriptor\022\023\n\013region_" + - "name\030\006 \001(\014\032Y\n\024StoreFlushDescriptor\022\023\n\013fa" + - "mily_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022" + - "\024\n\014flush_output\030\003 \003(\t\"S\n\013FlushAction\022\017\n\013" + - "START_FLUSH\020\000\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABORT" + - "_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003\"q\n\017StoreDescr", - "iptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_home" + - "_dir\030\002 \002(\t\022\022\n\nstore_file\030\003 \003(\t\022\035\n\025store_" + - "file_size_bytes\030\004 \001(\004\"\237\001\n\022BulkLoadDescri" + - "ptor\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Tabl" + - "eName\022\033\n\023encoded_region_name\030\002 \002(\014\022)\n\006st" + - "ores\030\003 \003(\0132\031.hbase.pb.StoreDescriptor\022\030\n" + - "\020bulkload_seq_num\030\004 \002(\003\"\272\002\n\025RegionEventD" + - "escriptor\022=\n\nevent_type\030\001 \002(\0162).hbase.pb" + - ".RegionEventDescriptor.EventType\022\022\n\ntabl" + - "e_name\030\002 \002(\014\022\033\n\023encoded_region_name\030\003 \002(", - "\014\022\033\n\023log_sequence_number\030\004 \001(\004\022)\n\006stores" + - "\030\005 \003(\0132\031.hbase.pb.StoreDescriptor\022$\n\006ser" + - "ver\030\006 \001(\0132\024.hbase.pb.ServerName\022\023\n\013regio" + - "n_name\030\007 \001(\014\".\n\tEventType\022\017\n\013REGION_OPEN" + - "\020\000\022\020\n\014REGION_CLOSE\020\001\"\014\n\nWALTrailer*d\n\tSc" + - "opeType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030" + - "REPLICATION_SCOPE_GLOBAL\020\001\022\034\n\030REPLICATIO" + - "N_SCOPE_SERIAL\020\002B?\n*org.apache.hadoop.hb" + - "ase.protobuf.generatedB\tWALProtosH\001\210\001\000\240\001" + - "\001" + "codec_cls_name\030\005 \001(\t\"\274\002\n\007WALEdit\022\033\n\023enco" + + "ded_region_name\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(" + + "\014\022\033\n\023log_sequence_number\030\003 \002(\004\022\022\n\nwrite_" + + "time\030\004 \002(\004\022&\n\ncluster_id\030\005 \001(\0132\016.hbase.p" + + "b.UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132\025.hbase.pb.Fa" + + "milyScope\022\032\n\022following_kv_count\030\007 \001(\r\022#\n", + "\013cluster_ids\030\010 \003(\0132\016.hbase.pb.UUID\022\022\n\nno" + + "nceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001(\004\022\034\n\024orig_se" + + "quence_number\030\013 \001(\004\"F\n\013FamilyScope\022\016\n\006fa" + + "mily\030\001 \002(\014\022\'\n\nscope_type\030\002 \002(\0162\023.hbase.p" + + "b.ScopeType\"\276\001\n\024CompactionDescriptor\022\022\n\n" + + "table_name\030\001 \002(\014\022\033\n\023encoded_region_name\030" + + "\002 
\002(\014\022\023\n\013family_name\030\003 \002(\014\022\030\n\020compaction" + + "_input\030\004 \003(\t\022\031\n\021compaction_output\030\005 \003(\t\022" + + "\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013region_name\030\007" + + " \001(\014\"\244\003\n\017FlushDescriptor\0225\n\006action\030\001 \002(\016", + "2%.hbase.pb.FlushDescriptor.FlushAction\022" + + "\022\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_region_na" + + "me\030\003 \002(\014\022\035\n\025flush_sequence_number\030\004 \001(\004\022" + + "E\n\rstore_flushes\030\005 \003(\0132..hbase.pb.FlushD" + + "escriptor.StoreFlushDescriptor\022\023\n\013region" + + "_name\030\006 \001(\014\032Y\n\024StoreFlushDescriptor\022\023\n\013f" + + "amily_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t" + + "\022\024\n\014flush_output\030\003 \003(\t\"S\n\013FlushAction\022\017\n" + + "\013START_FLUSH\020\000\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABOR" + + "T_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003\"q\n\017StoreDesc", + "riptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_hom" + + "e_dir\030\002 \002(\t\022\022\n\nstore_file\030\003 \003(\t\022\035\n\025store" + + "_file_size_bytes\030\004 \001(\004\"\237\001\n\022BulkLoadDescr" + + "iptor\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Tab" + + "leName\022\033\n\023encoded_region_name\030\002 \002(\014\022)\n\006s" + + "tores\030\003 \003(\0132\031.hbase.pb.StoreDescriptor\022\030" + + "\n\020bulkload_seq_num\030\004 \002(\003\"\272\002\n\025RegionEvent" + + "Descriptor\022=\n\nevent_type\030\001 \002(\0162).hbase.p" + + "b.RegionEventDescriptor.EventType\022\022\n\ntab" + + "le_name\030\002 \002(\014\022\033\n\023encoded_region_name\030\003 \002", + "(\014\022\033\n\023log_sequence_number\030\004 \001(\004\022)\n\006store" + + "s\030\005 \003(\0132\031.hbase.pb.StoreDescriptor\022$\n\006se" + + "rver\030\006 \001(\0132\024.hbase.pb.ServerName\022\023\n\013regi" + + "on_name\030\007 \001(\014\".\n\tEventType\022\017\n\013REGION_OPE" + + "N\020\000\022\020\n\014REGION_CLOSE\020\001\"\014\n\nWALTrailer*d\n\tS" + + "copeType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n" + + "\030REPLICATION_SCOPE_GLOBAL\020\001\022\034\n\030REPLICATI" + + "ON_SCOPE_SERIAL\020\002B?\n*org.apache.hadoop.h" + + "base.protobuf.generatedB\tWALProtosH\001\210\001\000\240" + + "\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -12040,11 +12038,11 @@ public final class WALProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_WALHeader_descriptor, new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", }); - internal_static_hbase_pb_WALKey_descriptor = + internal_static_hbase_pb_WALEdit_descriptor = getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_WALKey_fieldAccessorTable = new + internal_static_hbase_pb_WALEdit_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WALKey_descriptor, + internal_static_hbase_pb_WALEdit_descriptor, new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", }); internal_static_hbase_pb_FamilyScope_descriptor = getDescriptor().getMessageTypes().get(2); diff --git 
hbase-protocol/src/main/protobuf/Admin.proto hbase-protocol/src/main/protobuf/Admin.proto index e905340..6b442f1 100644 --- hbase-protocol/src/main/protobuf/Admin.proto +++ hbase-protocol/src/main/protobuf/Admin.proto @@ -194,7 +194,7 @@ message MergeRegionsResponse { // Protocol buffer version of WAL for replication message WALEntry { - required WALKey key = 1; + required WALEdit edit = 1; // Following may be null if the KVs/Cells are carried along the side in a cellblock (See // RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null // and associated_cell_count has count of Cells associated w/ this WALEntry diff --git hbase-protocol/src/main/protobuf/WAL.proto hbase-protocol/src/main/protobuf/WAL.proto index 2494977..5eba4b3 100644 --- hbase-protocol/src/main/protobuf/WAL.proto +++ hbase-protocol/src/main/protobuf/WAL.proto @@ -34,10 +34,9 @@ message WALHeader { } /* - * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header - * for some KVs + * Protocol buffer version of WALEdit; */ -message WALKey { +message WALEdit { required bytes encoded_region_name = 1; required bytes table_name = 2; required uint64 log_sequence_number = 3; @@ -62,14 +61,6 @@ message WALKey { optional uint64 nonceGroup = 9; optional uint64 nonce = 10; optional uint64 orig_sequence_number = 11; - -/* - optional CustomEntryType custom_entry_type = 9; - - enum CustomEntryType { - COMPACTION = 0; - } -*/ } enum ScopeType { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java index 8cb2237..bec0b25 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java @@ -110,7 +110,7 @@ public class ReplicationProtbufUtil { for (Entry entry: entries) { entryBuilder.clear(); // TODO: this duplicates a lot in WALKey#getBuilder - WALProtos.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder(); + WALProtos.WALEdit.Builder keyBuilder = entryBuilder.getEditBuilder(); WALKey key = entry.getKey(); keyBuilder.setEncodedRegionName( ByteStringer.wrap(encodedRegionName == null diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 3859d18..aeb98ca 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -1847,7 +1847,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, // empty input return ReplicateWALEntryResponse.newBuilder().build(); } - ByteString regionName = entries.get(0).getKey().getEncodedRegionName(); + ByteString regionName = entries.get(0).getEdit().getEncodedRegionName(); Region region = regionServer.getRegionByEncodedName(regionName.toStringUtf8()); RegionCoprocessorHost coprocessorHost = ServerRegionReplicaUtil.isDefaultReplica(region.getRegionInfo()) @@ -1860,19 +1860,19 @@ public class RSRpcServices implements HBaseRPCErrorHandler, Durability durability = isPrimary ? 
Durability.USE_DEFAULT : Durability.SKIP_WAL; for (WALEntry entry : entries) { - if (!regionName.equals(entry.getKey().getEncodedRegionName())) { + if (!regionName.equals(entry.getEdit().getEncodedRegionName())) { throw new NotServingRegionException("Replay request contains entries from multiple " + "regions. First region:" + regionName.toStringUtf8() + " , other region:" - + entry.getKey().getEncodedRegionName()); + + entry.getEdit().getEncodedRegionName()); } if (regionServer.nonceManager != null && isPrimary) { - long nonceGroup = entry.getKey().hasNonceGroup() - ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE; - long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE; + long nonceGroup = entry.getEdit().hasNonceGroup() + ? entry.getEdit().getNonceGroup() : HConstants.NO_NONCE; + long nonce = entry.getEdit().hasNonce() ? entry.getEdit().getNonce() : HConstants.NO_NONCE; regionServer.nonceManager.reportOperationFromWal( nonceGroup, nonce, - entry.getKey().getWriteTime()); + entry.getEdit().getWriteTime()); } Pair walEntry = (coprocessorHost == null) ? null : new Pair(); @@ -1889,8 +1889,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler, walEntries.add(walEntry); } if(edits!=null && !edits.isEmpty()) { - long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ? - entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber(); + long replaySeqId = (entry.getEdit().hasOrigSequenceNumber()) ? + entry.getEdit().getOrigSequenceNumber() : entry.getEdit().getLogSequenceNumber(); OperationStatus[] result = doReplayBatchOp(region, edits, replaySeqId); // check if it's a partial success for (int i = 0; result != null && i < result.length; i++) { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index 0755358..b032025 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder; -import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; @@ -93,7 +92,7 @@ public class ProtobufLogReader extends ReaderBase { writerClsNames.add(ProtobufLogWriter.class.getSimpleName()); writerClsNames.add(AsyncProtobufLogWriter.class.getSimpleName()); } - + // cell codec classname private String codecClsName = null; @@ -102,12 +101,12 @@ public class ProtobufLogReader extends ReaderBase { SUCCESS, UNKNOWN_WRITER_CLS // name of writer class isn't recognized } - + // context for WALHdr carrying information such as Cell Codec classname static class WALHdrContext { WALHdrResult result; String cellCodecClsName; - + WALHdrContext(WALHdrResult result, String cellCodecClsName) { this.result = result; this.cellCodecClsName = cellCodecClsName; @@ -161,7 +160,7 @@ public class ProtobufLogReader extends ReaderBase { public List getWriterClsNames() { return writerClsNames; } - + /* * Returns the cell codec classname */ @@ -219,9 +218,9 @@ public class 
ProtobufLogReader extends ReaderBase { LOG.trace("After reading the trailer: walEditsStopOffset: " + this.walEditsStopOffset + ", fileLength: " + this.fileLength + ", " + "trailerPresent: " + trailerPresent); } - + codecClsName = hdrCtxt.getCellCodecClsName(); - + return hdrCtxt.getCellCodecClsName(); } @@ -287,7 +286,7 @@ public class ProtobufLogReader extends ReaderBase { protected void initAfterCompression() throws IOException { initAfterCompression(null); } - + @Override protected void initAfterCompression(String cellCodecClsName) throws IOException { WALCellCodec codec = getCodec(this.conf, cellCodecClsName, this.compressionContext); @@ -315,7 +314,7 @@ public class ProtobufLogReader extends ReaderBase { if (trailerPresent && originalPosition > 0 && originalPosition == this.walEditsStopOffset) { return false; } - WALKey.Builder builder = WALKey.newBuilder(); + WALProtos.WALEdit.Builder builder = WALProtos.WALEdit.newBuilder(); long size = 0; try { long available = -1; @@ -345,13 +344,13 @@ public class ProtobufLogReader extends ReaderBase { throw new EOFException("Partial PB while reading WAL, " + "probably an unexpected EOF, ignoring"); } - WALKey walKey = builder.build(); - entry.getKey().readFieldsFromPb(walKey, this.byteStringUncompressor); - if (!walKey.hasFollowingKvCount() || 0 == walKey.getFollowingKvCount()) { + WALProtos.WALEdit walEdit = builder.build(); + entry.getKey().readFieldsFromPb(walEdit, this.byteStringUncompressor); + if (!walEdit.hasFollowingKvCount() || 0 == walEdit.getFollowingKvCount()) { LOG.trace("WALKey has no KVs that follow it; trying the next one"); continue; } - int expectedCells = walKey.getFollowingKvCount(); + int expectedCells = walEdit.getFollowingKvCount(); long posBefore = this.inputStream.getPos(); try { int actualCells = entry.getEdit().readFromCells(cellDecoder, expectedCells); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java index 9e7b3af..a47d7a9 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java @@ -160,7 +160,7 @@ public class ReplicationSink { for (WALEntry entry : entries) { TableName table = - TableName.valueOf(entry.getKey().getTableName().toByteArray()); + TableName.valueOf(entry.getEdit().getTableName().toByteArray()); Cell previousCell = null; Mutation m = null; int count = entry.getAssociatedCellCount(); @@ -185,7 +185,7 @@ public class ReplicationSink { cell.getRowLength()) : new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); List clusterIds = new ArrayList(); - for (HBaseProtos.UUID clusterId : entry.getKey().getClusterIdsList()) { + for (HBaseProtos.UUID clusterId : entry.getEdit().getClusterIdsList()) { clusterIds.add(toUUID(clusterId)); } m.setClusterIds(clusterIds); @@ -222,7 +222,7 @@ public class ReplicationSink { } int size = entries.size(); - this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getKey().getWriteTime()); + this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getEdit().getWriteTime()); this.metrics.applyBatch(size + hfilesReplicated, hfilesReplicated); this.totalReplicatedEdits.addAndGet(totalReplicated); } catch (IOException ex) { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java 
hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java index 2bac2ad..5ca943b 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java @@ -558,10 +558,10 @@ public class WALKey implements SequenceId, Comparable { this.encodedRegionName = encodedRegionName; } - public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getBuilder( + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder getBuilder( WALCellCodec.ByteStringCompressor compressor) throws IOException { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builder = - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.Builder builder = + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit.newBuilder(); if (compressionContext == null) { builder.setEncodedRegionName(ByteStringer.wrap(this.encodedRegionName)); builder.setTableName(ByteStringer.wrap(this.tablename.getName())); @@ -599,48 +599,48 @@ public class WALKey implements SequenceId, Comparable { return builder; } - public void readFieldsFromPb(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey walKey, + public void readFieldsFromPb(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit walEdit, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException { if (this.compressionContext != null) { this.encodedRegionName = uncompressor.uncompress( - walKey.getEncodedRegionName(), compressionContext.regionDict); + walEdit.getEncodedRegionName(), compressionContext.regionDict); byte[] tablenameBytes = uncompressor.uncompress( - walKey.getTableName(), compressionContext.tableDict); + walEdit.getTableName(), compressionContext.tableDict); this.tablename = TableName.valueOf(tablenameBytes); } else { - this.encodedRegionName = walKey.getEncodedRegionName().toByteArray(); - this.tablename = TableName.valueOf(walKey.getTableName().toByteArray()); + this.encodedRegionName = walEdit.getEncodedRegionName().toByteArray(); + this.tablename = TableName.valueOf(walEdit.getTableName().toByteArray()); } clusterIds.clear(); - if (walKey.hasClusterId()) { + if (walEdit.hasClusterId()) { //When we are reading the older log (0.95.1 release) //This is definitely the originating cluster - clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId() + clusterIds.add(new UUID(walEdit.getClusterId().getMostSigBits(), walEdit.getClusterId() .getLeastSigBits())); } - for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) { + for (HBaseProtos.UUID clusterId : walEdit.getClusterIdsList()) { clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits())); } - if (walKey.hasNonceGroup()) { - this.nonceGroup = walKey.getNonceGroup(); + if (walEdit.hasNonceGroup()) { + this.nonceGroup = walEdit.getNonceGroup(); } - if (walKey.hasNonce()) { - this.nonce = walKey.getNonce(); + if (walEdit.hasNonce()) { + this.nonce = walEdit.getNonce(); } this.replicationScope = null; - if (walKey.getScopesCount() > 0) { + if (walEdit.getScopesCount() > 0) { this.replicationScope = new TreeMap(Bytes.BYTES_COMPARATOR); - for (FamilyScope scope : walKey.getScopesList()) { + for (FamilyScope scope : walEdit.getScopesList()) { byte[] family = (compressionContext == null) ? 
scope.getFamily().toByteArray() : uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict); this.replicationScope.put(family, scope.getScopeType().getNumber()); } } - setSequenceId(walKey.getLogSequenceNumber()); - this.writeTime = walKey.getWriteTime(); - if(walKey.hasOrigSequenceNumber()) { - this.origLogSeqNum = walKey.getOrigSequenceNumber(); + setSequenceId(walEdit.getLogSequenceNumber()); + this.writeTime = walEdit.getWriteTime(); + if(walEdit.hasOrigSequenceNumber()) { + this.origLogSeqNum = walEdit.getOrigSequenceNumber(); } } } \ No newline at end of file diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java index f0ae6a7..4061d9b 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java @@ -2310,8 +2310,8 @@ public class WALSplitter { return new ArrayList(); } - long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ? - entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber(); + long replaySeqId = (entry.getEdit().hasOrigSequenceNumber()) ? + entry.getEdit().getOrigSequenceNumber() : entry.getEdit().getLogSequenceNumber(); int count = entry.getAssociatedCellCount(); List mutations = new ArrayList(); Cell previousCell = null; @@ -2341,9 +2341,9 @@ public class WALSplitter { } else { m = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); // Puts might come from increment or append, thus we need nonces. - long nonceGroup = entry.getKey().hasNonceGroup() - ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE; - long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE; + long nonceGroup = entry.getEdit().hasNonceGroup() + ? entry.getEdit().getNonceGroup() : HConstants.NO_NONCE; + long nonce = entry.getEdit().hasNonce() ? 
entry.getEdit().getNonce():HConstants.NO_NONCE; mutations.add(new MutationReplay(MutationType.PUT, m, nonceGroup, nonce)); } } @@ -2358,9 +2358,9 @@ public class WALSplitter { // reconstruct WALKey if (logEntry != null) { - org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey walKeyProto = entry.getKey(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALEdit walKeyProto = entry.getEdit(); List clusterIds = new ArrayList(walKeyProto.getClusterIdsCount()); - for (HBaseProtos.UUID uuid : entry.getKey().getClusterIdsList()) { + for (HBaseProtos.UUID uuid : entry.getEdit().getClusterIdsList()) { clusterIds.add(new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits())); } key = new WALKey(walKeyProto.getEncodedRegionName().toByteArray(), TableName.valueOf( diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java index 049ca8e..5fd7417 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java @@ -59,7 +59,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID; import org.apache.hadoop.hbase.protobuf.generated.WALProtos; -import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; @@ -381,7 +380,7 @@ public class TestReplicationSink { private WALEntry.Builder createWALEntryBuilder(TableName table) { WALEntry.Builder builder = WALEntry.newBuilder(); builder.setAssociatedCellCount(1); - WALKey.Builder keyBuilder = WALKey.newBuilder(); + WALProtos.WALEdit.Builder keyBuilder = WALProtos.WALEdit.newBuilder(); UUID.Builder uuidBuilder = UUID.newBuilder(); uuidBuilder.setLeastSigBits(HConstants.DEFAULT_CLUSTER_ID.getLeastSignificantBits()); uuidBuilder.setMostSigBits(HConstants.DEFAULT_CLUSTER_ID.getMostSignificantBits()); @@ -390,7 +389,7 @@ public class TestReplicationSink { keyBuilder.setWriteTime(System.currentTimeMillis()); keyBuilder.setEncodedRegionName(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY)); keyBuilder.setLogSequenceNumber(-1); - builder.setKey(keyBuilder.build()); + builder.setEdit(keyBuilder.build()); return builder; } }
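
For reference: protobuf wire compatibility depends on field numbers and types, not on message or field names, so renaming hbase.pb.WALKey to hbase.pb.WALEdit and WALEntry's "key" field to "edit" (both keeping tag 1) leaves the serialized bytes unchanged; only the generated Java API moves from getKey()/setKey() to getEdit()/setEdit(). Below is a minimal sketch (not part of this patch) of how a caller of the generated classes migrates; the example class and method names are hypothetical.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;

public class WALEntryMigrationExample {
  // Hypothetical helper: builds a replication WALEntry using the renamed API.
  public static WALEntry buildEntry(byte[] encodedRegionName, byte[] tableName, long seqId) {
    // Before this commit: WALProtos.WALKey.newBuilder() and WALEntry.Builder#setKey(...).
    WALProtos.WALEdit header = WALProtos.WALEdit.newBuilder()
        .setEncodedRegionName(ByteString.copyFrom(encodedRegionName)) // required, tag 1
        .setTableName(ByteString.copyFrom(tableName))                 // required, tag 2
        .setLogSequenceNumber(seqId)                                  // required, tag 3
        .setWriteTime(System.currentTimeMillis())                     // required, tag 4
        .build();
    return WALEntry.newBuilder()
        .setEdit(header)           // was setKey(header); same tag (1) on the wire
        .setAssociatedCellCount(0) // Cells ride alongside in the RPC cellblock, not in the PB
        .build();
  }
}

Because the tag numbers are untouched, an entry serialized by a pre-rename server parses cleanly with the post-rename classes (and vice versa); only recompiled Java callers need the setKey-to-setEdit change shown above.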