From 22a4045a6c47c4669109f1b08821ed2264952219 Mon Sep 17 00:00:00 2001
From: chenheng
Date: Thu, 18 Feb 2016 14:08:15 +0800
Subject: [PATCH] HBASE-15128 Disable region splits and merges switch in master

---
 .../java/org/apache/hadoop/hbase/client/Admin.java |   27 +-
 .../hbase/client/ConnectionImplementation.java     |   13 +
 .../org/apache/hadoop/hbase/client/HBaseAdmin.java |   32 +
 .../hadoop/hbase/protobuf/RequestConverter.java    |   48 +
 .../hadoop/hbase/zookeeper/ZooKeeperWatcher.java   |   10 +
 .../hbase/protobuf/generated/MasterProtos.java     | 4304 +++++++++++++++-----
 .../hbase/protobuf/generated/SnapshotProtos.java   |  500 +--
 .../hbase/protobuf/generated/ZooKeeperProtos.java  |  462 ++-
 hbase-protocol/src/main/protobuf/Master.proto      |   36 +
 hbase-protocol/src/main/protobuf/ZooKeeper.proto   |    7 +
 .../hadoop/hbase/master/AssignmentManager.java     |    8 +
 .../org/apache/hadoop/hbase/master/HMaster.java    |   28 +
 .../hadoop/hbase/master/MasterRpcServices.java     |   42 +
 .../org/apache/hadoop/hbase/util/HBaseFsck.java    |   35 +
 .../zookeeper/SplitOrMergeTrackerManager.java      |  149 +
 .../hbase/client/TestSplitOrMergeStatus.java       |  198 +
 hbase-shell/src/main/ruby/hbase/admin.rb           |   32 +
 hbase-shell/src/main/ruby/shell.rb                 |    2 +
 .../ruby/shell/commands/splitormerge_enabled.rb    |   41 +
 .../ruby/shell/commands/splitormerge_switch.rb     |   42 +
 20 files changed, 4816 insertions(+), 1200 deletions(-)
 create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SplitOrMergeTrackerManager.java
 create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
 create mode 100644 hbase-shell/src/main/ruby/shell/commands/splitormerge_enabled.rb
 create mode 100644 hbase-shell/src/main/ruby/shell/commands/splitormerge_switch.rb
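Note for reviewers: a minimal, self-contained sketch of how a client is expected to drive
the new switch API added by this patch. The class name and the HBaseConfiguration /
ConnectionFactory scaffolding are illustrative assumptions, not part of the change itself.

  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class SplitOrMergeSwitchExample {
    public static void main(String[] args) throws Exception {
      try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Admin admin = conn.getAdmin()) {
        // Turn both switches off; the previous value of each switch comes back
        // in the same order the switch types were passed.
        boolean[] prev = admin.setSplitOrMergeEnabled(false, true,
            Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
        System.out.println("split now enabled: "
            + admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.SPLIT));
        // ... perform maintenance, then restore the earlier state.
        admin.setSplitOrMergeEnabled(prev[0], false, Admin.MasterSwitchType.SPLIT);
        admin.setSplitOrMergeEnabled(prev[1], false, Admin.MasterSwitchType.MERGE);
      }
    }
  }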
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index d7b52d5..b2543c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1678,11 +1678,28 @@ public interface Admin extends Abortable, Closeable {
   List<SecurityCapability> getSecurityCapabilities() throws IOException;
 
   /**
+   * Turn the Split or Merge switches on or off.
+   *
+   * @param enabled true to enable the switches, false to disable them
+   * @param synchronous if true, waits for any outstanding split() call to return before toggling
+   * @param switchTypes one or more {@link MasterSwitchType} values to toggle
+   * @return an array of the previous switch values, in the same order as {@code switchTypes}
+   */
+  boolean[] setSplitOrMergeEnabled(final boolean enabled, final boolean synchronous,
+      final MasterSwitchType... switchTypes) throws IOException;
+
+  /**
+   * Query the current state of the switch.
+   * @param switchType the {@link MasterSwitchType} to query
+   * @return true if the switch is enabled, false otherwise.
+   */
+  boolean isSplitOrMergeEnabled(final MasterSwitchType switchType) throws IOException;
+
+  /**
    * Currently, there are only two compact types:
    * {@code NORMAL} means do store files compaction;
    * {@code MOB} means do mob files compaction.
    * */
-  @InterfaceAudience.Public
   @InterfaceStability.Unstable
   public enum CompactType {
@@ -1692,4 +1709,12 @@ public interface Admin extends Abortable, Closeable {
 
     CompactType(int value) {}
   }
+
+  @InterfaceAudience.Public
+  @InterfaceStability.Unstable
+  public enum MasterSwitchType {
+    SPLIT,
+    MERGE
+  }
+
 }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index dfa9937..9d5f5c6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -1742,6 +1742,19 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   }
 
   @Override
+  public MasterProtos.SetSplitOrMergeEnabledResponse setSplitOrMergeEnabled(
+      RpcController controller, MasterProtos.SetSplitOrMergeEnabledRequest request)
+      throws ServiceException {
+    return stub.setSplitOrMergeEnabled(controller, request);
+  }
+
+  @Override
+  public MasterProtos.IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(RpcController controller,
+      MasterProtos.IsSplitOrMergeEnabledRequest request) throws ServiceException {
+    return stub.isSplitOrMergeEnabled(controller, request);
+  }
+
+  @Override
   public IsNormalizerEnabledResponse isNormalizerEnabled(RpcController controller,
       IsNormalizerEnabledRequest request) throws ServiceException {
     return stub.isNormalizerEnabled(controller, request);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index db94ff4..fba037e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -89,6 +89,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescripti
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest;
@@ -3378,6 +3379,37 @@ public class HBaseAdmin implements Admin {
   }
 
+  @Override
+  public boolean[] setSplitOrMergeEnabled(final boolean enabled, final boolean synchronous,
+      final MasterSwitchType... switchTypes)
+      throws IOException {
+    return executeCallable(new MasterCallable<boolean[]>(getConnection()) {
+      @Override
+      public boolean[] call(int callTimeout) throws ServiceException {
+        MasterProtos.SetSplitOrMergeEnabledResponse response = master.setSplitOrMergeEnabled(null,
+            RequestConverter.buildSetSplitOrMergeEnabledRequest(enabled, synchronous,
+                switchTypes));
+        boolean[] result = new boolean[switchTypes.length];
+        int i = 0;
+        for (Boolean prevValue : response.getPrevValueList()) {
+          result[i++] = prevValue;
+        }
+        return result;
+      }
+    });
+  }
+
+  @Override
+  public boolean isSplitOrMergeEnabled(final MasterSwitchType switchType) throws IOException {
+    return executeCallable(new MasterCallable<Boolean>(getConnection()) {
+      @Override
+      public Boolean call(int callTimeout) throws ServiceException {
+        return master.isSplitOrMergeEnabled(null,
+            RequestConverter.buildIsSplitOrMergeEnabledRequest(switchType)).getEnabled();
+      }
+    });
+  }
+
   private HRegionInfo getMobRegionInfo(TableName tableName) {
     return new HRegionInfo(tableName, Bytes.toBytes(".mob"), HConstants.EMPTY_END_ROW, false, 0);
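Before the RequestConverter changes that follow, a small sketch of the message those
helpers assemble, written against the generated protobuf API this patch adds. The class
name and the round-trip check are illustrative only, not a test from this change.

  import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType;
  import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;

  public class SetSplitOrMergeEnabledRequestSketch {
    public static void main(String[] args) throws Exception {
      SetSplitOrMergeEnabledRequest req = SetSplitOrMergeEnabledRequest.newBuilder()
          .setEnabled(false)                       // required bool enabled = 1
          .setSynchronous(true)                    // optional bool synchronous = 2
          .addSwitchTypes(MasterSwitchType.SPLIT)  // repeated .hbase.pb.MasterSwitchType = 3
          .addSwitchTypes(MasterSwitchType.MERGE)
          .build();
      // Serializing and parsing back yields an equal message.
      SetSplitOrMergeEnabledRequest parsed =
          SetSplitOrMergeEnabledRequest.parseFrom(req.toByteArray());
      if (!parsed.equals(req) || parsed.getSwitchTypesCount() != 2) {
        throw new AssertionError("round-trip mismatch");
      }
    }
  }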
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 572d92c..b122737 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Action;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
@@ -76,6 +77,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest;
@@ -95,6 +97,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabled
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest;
@@ -103,6 +106,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequ
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;
@@ -1692,4 +1696,48 @@ public final class RequestConverter {
   public static SetNormalizerRunningRequest buildSetNormalizerRunningRequest(boolean on) {
     return SetNormalizerRunningRequest.newBuilder().setOn(on).build();
   }
+
+  /**
+   * Creates a protocol buffer IsSplitOrMergeEnabledRequest
+   *
+   * @param switchType see {@link Admin.MasterSwitchType}
+   * @return an IsSplitOrMergeEnabledRequest
+   */
+  public static IsSplitOrMergeEnabledRequest buildIsSplitOrMergeEnabledRequest(
+      Admin.MasterSwitchType switchType) {
+    IsSplitOrMergeEnabledRequest.Builder builder = IsSplitOrMergeEnabledRequest.newBuilder();
+    builder.setSwitchType(convert(switchType));
+    return builder.build();
+  }
+
+  /**
+   * Creates a protocol buffer SetSplitOrMergeEnabledRequest
+   *
+   * @param enabled whether the switches should be enabled
+   * @param synchronous whether to set the switches synchronously
+   * @param switchTypes a list of {@link Admin.MasterSwitchType} values
+   * @return a SetSplitOrMergeEnabledRequest
+   */
+  public static SetSplitOrMergeEnabledRequest buildSetSplitOrMergeEnabledRequest(boolean enabled,
+      boolean synchronous, Admin.MasterSwitchType... switchTypes) {
+    SetSplitOrMergeEnabledRequest.Builder builder = SetSplitOrMergeEnabledRequest.newBuilder();
+    builder.setEnabled(enabled);
+    builder.setSynchronous(synchronous);
+    for (Admin.MasterSwitchType switchType : switchTypes) {
+      builder.addSwitchTypes(convert(switchType));
+    }
+    return builder.build();
+  }
+
+  private static MasterProtos.MasterSwitchType convert(Admin.MasterSwitchType switchType) {
+    switch (switchType) {
+      case SPLIT:
+        return MasterProtos.MasterSwitchType.SPLIT;
+      case MERGE:
+        return MasterProtos.MasterSwitchType.MERGE;
+      default:
+        break;
+    }
+    throw new UnsupportedOperationException("Unsupported switch type: " + switchType);
+  }
 }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index 36a9bc5..b665353 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -115,6 +115,8 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
   public String balancerZNode;
   // znode containing the state of region normalizer
   private String regionNormalizerZNode;
+  // znode containing the state of all switches, currently there are split and merge child nodes.
+ private String switchZNode; // znode containing the lock for the tables public String tableLockZNode; // znode containing the state of recovering regions @@ -430,6 +432,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable { conf.get("zookeeper.znode.balancer", "balancer")); regionNormalizerZNode = ZKUtil.joinZNode(baseZNode, conf.get("zookeeper.znode.regionNormalizer", "normalizer")); + switchZNode = ZKUtil.joinZNode(baseZNode, conf.get("zookeeper.znode.switch", "switch")); tableLockZNode = ZKUtil.joinZNode(baseZNode, conf.get("zookeeper.znode.tableLock", "table-lock")); recoveringRegionsZNode = ZKUtil.joinZNode(baseZNode, @@ -789,4 +792,11 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable { public String getRegionNormalizerZNode() { return regionNormalizerZNode; } + + /** + * @return ZK node for switch + * */ + public String getSwitchZNode() { + return switchZNode; + } } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java index 043d549..073eba9 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -8,6 +8,88 @@ public final class MasterProtos { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + /** + * Protobuf enum {@code hbase.pb.MasterSwitchType} + */ + public enum MasterSwitchType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SPLIT = 0; + */ + SPLIT(0, 0), + /** + * MERGE = 1; + */ + MERGE(1, 1), + ; + + /** + * SPLIT = 0; + */ + public static final int SPLIT_VALUE = 0; + /** + * MERGE = 1; + */ + public static final int MERGE_VALUE = 1; + + + public final int getNumber() { return value; } + + public static MasterSwitchType valueOf(int value) { + switch (value) { + case 0: return SPLIT; + case 1: return MERGE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public MasterSwitchType findValueByNumber(int number) { + return MasterSwitchType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final MasterSwitchType[] VALUES = values(); + + public static MasterSwitchType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private MasterSwitchType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType) + } + public interface AddColumnRequestOrBuilder extends 
com.google.protobuf.MessageOrBuilder { @@ -28764,28 +28846,62 @@ public final class MasterProtos { // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse) } - public interface NormalizeRequestOrBuilder + public interface SetSplitOrMergeEnabledRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { + + // required bool enabled = 1; + /** + * required bool enabled = 1; + */ + boolean hasEnabled(); + /** + * required bool enabled = 1; + */ + boolean getEnabled(); + + // optional bool synchronous = 2; + /** + * optional bool synchronous = 2; + */ + boolean hasSynchronous(); + /** + * optional bool synchronous = 2; + */ + boolean getSynchronous(); + + // repeated .hbase.pb.MasterSwitchType switch_types = 3; + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + java.util.List getSwitchTypesList(); + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + int getSwitchTypesCount(); + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index); } /** - * Protobuf type {@code hbase.pb.NormalizeRequest} + * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest} */ - public static final class NormalizeRequest extends + public static final class SetSplitOrMergeEnabledRequest extends com.google.protobuf.GeneratedMessage - implements NormalizeRequestOrBuilder { - // Use NormalizeRequest.newBuilder() to construct. - private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements SetSplitOrMergeEnabledRequestOrBuilder { + // Use SetSplitOrMergeEnabledRequest.newBuilder() to construct. + private SetSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private NormalizeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private SetSplitOrMergeEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final NormalizeRequest defaultInstance; - public static NormalizeRequest getDefaultInstance() { + private static final SetSplitOrMergeEnabledRequest defaultInstance; + public static SetSplitOrMergeEnabledRequest getDefaultInstance() { return defaultInstance; } - public NormalizeRequest getDefaultInstanceForType() { + public SetSplitOrMergeEnabledRequest getDefaultInstanceForType() { return defaultInstance; } @@ -28795,11 +28911,12 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private NormalizeRequest( + private SetSplitOrMergeEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); + int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -28817,6 +28934,49 @@ public final class MasterProtos { } break; } + case 8: { + bitField0_ |= 0x00000001; + enabled_ = input.readBool(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + synchronous_ = input.readBool(); + break; + } + case 24: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(rawValue); + if (value == null) { + 
unknownFields.mergeVarintField(3, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + switchTypes_.add(value); + } + break; + } + case 26: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + switchTypes_.add(value); + } + } + input.popLimit(oldLimit); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -28825,44 +28985,109 @@ public final class MasterProtos { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = java.util.Collections.unmodifiableList(switchTypes_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NormalizeRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetSplitOrMergeEnabledRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new NormalizeRequest(input, extensionRegistry); + return new SetSplitOrMergeEnabledRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } + private int bitField0_; + // required bool enabled = 1; + public static final int ENABLED_FIELD_NUMBER = 1; + private boolean enabled_; + /** + * required bool enabled = 1; + */ + public boolean hasEnabled() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool enabled = 1; + */ + public boolean getEnabled() { + return 
enabled_; + } + + // optional bool synchronous = 2; + public static final int SYNCHRONOUS_FIELD_NUMBER = 2; + private boolean synchronous_; + /** + * optional bool synchronous = 2; + */ + public boolean hasSynchronous() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool synchronous = 2; + */ + public boolean getSynchronous() { + return synchronous_; + } + + // repeated .hbase.pb.MasterSwitchType switch_types = 3; + public static final int SWITCH_TYPES_FIELD_NUMBER = 3; + private java.util.List switchTypes_; + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public java.util.List getSwitchTypesList() { + return switchTypes_; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public int getSwitchTypesCount() { + return switchTypes_.size(); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index) { + return switchTypes_.get(index); + } + private void initFields() { + enabled_ = false; + synchronous_ = false; + switchTypes_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasEnabled()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -28870,6 +29095,15 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, enabled_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, synchronous_); + } + for (int i = 0; i < switchTypes_.size(); i++) { + output.writeEnum(3, switchTypes_.get(i).getNumber()); + } getUnknownFields().writeTo(output); } @@ -28879,6 +29113,23 @@ public final class MasterProtos { if (size != -1) return size; size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, enabled_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, synchronous_); + } + { + int dataSize = 0; + for (int i = 0; i < switchTypes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(switchTypes_.get(i).getNumber()); + } + size += dataSize; + size += 1 * switchTypes_.size(); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -28896,12 +29147,24 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest) obj; boolean result = true; + result = result && (hasEnabled() == other.hasEnabled()); + if (hasEnabled()) { + result = result && (getEnabled() + == other.getEnabled()); + } + result = result && 
(hasSynchronous() == other.hasSynchronous()); + if (hasSynchronous()) { + result = result && (getSynchronous() + == other.getSynchronous()); + } + result = result && getSwitchTypesList() + .equals(other.getSwitchTypesList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -28915,58 +29178,70 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEnabled()) { + hash = (37 * hash) + ENABLED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getEnabled()); + } + if (hasSynchronous()) { + hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getSynchronous()); + } + if (getSwitchTypesCount() > 0) { + hash = (37 * hash) + SWITCH_TYPES_FIELD_NUMBER; + hash = (53 * hash) + hashEnumList(getSwitchTypesList()); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -28975,7 +29250,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -28987,24 +29262,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.NormalizeRequest} + * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -29024,6 +29299,12 @@ public final class MasterProtos { public Builder 
clear() { super.clear(); + enabled_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + synchronous_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + switchTypes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -29033,43 +29314,79 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.enabled_ = enabled_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.synchronous_ = synchronous_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = java.util.Collections.unmodifiableList(switchTypes_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.switchTypes_ = switchTypes_; + result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance()) return this; + 
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.getDefaultInstance()) return this; + if (other.hasEnabled()) { + setEnabled(other.getEnabled()); + } + if (other.hasSynchronous()) { + setSynchronous(other.getSynchronous()); + } + if (!other.switchTypes_.isEmpty()) { + if (switchTypes_.isEmpty()) { + switchTypes_ = other.switchTypes_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureSwitchTypesIsMutable(); + switchTypes_.addAll(other.switchTypes_); + } + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasEnabled()) { + + return false; + } return true; } @@ -29077,11 +29394,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -29090,50 +29407,193 @@ public final class MasterProtos { } return this; } + private int bitField0_; - // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeRequest) + // required bool enabled = 1; + private boolean enabled_ ; + /** + * required bool enabled = 1; + */ + public boolean hasEnabled() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool enabled = 1; + */ + public boolean getEnabled() { + return enabled_; + } + /** + * required bool enabled = 1; + */ + public Builder setEnabled(boolean value) { + bitField0_ |= 0x00000001; + enabled_ = value; + onChanged(); + return this; + } + /** + * required bool enabled = 1; + */ + public Builder clearEnabled() { + bitField0_ = (bitField0_ & ~0x00000001); + enabled_ = false; + onChanged(); + return this; + } + + // optional bool synchronous = 2; + private boolean synchronous_ ; + /** + * optional bool synchronous = 2; + */ + public boolean hasSynchronous() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool synchronous = 2; + */ + public boolean getSynchronous() { + return synchronous_; + } + /** + * optional bool synchronous = 2; + */ + public Builder setSynchronous(boolean value) { + bitField0_ |= 0x00000002; + synchronous_ = value; + onChanged(); + return this; + } + /** + * optional bool synchronous = 2; + */ + public Builder clearSynchronous() { + bitField0_ = (bitField0_ & ~0x00000002); + synchronous_ = false; + onChanged(); + return this; + } + + // repeated .hbase.pb.MasterSwitchType switch_types = 3; + private java.util.List switchTypes_ = + java.util.Collections.emptyList(); + private void ensureSwitchTypesIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = new java.util.ArrayList(switchTypes_); + bitField0_ |= 0x00000004; + } + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + 
public java.util.List getSwitchTypesList() { + return java.util.Collections.unmodifiableList(switchTypes_); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public int getSwitchTypesCount() { + return switchTypes_.size(); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index) { + return switchTypes_.get(index); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder setSwitchTypes( + int index, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSwitchTypesIsMutable(); + switchTypes_.set(index, value); + onChanged(); + return this; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder addSwitchTypes(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSwitchTypesIsMutable(); + switchTypes_.add(value); + onChanged(); + return this; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder addAllSwitchTypes( + java.lang.Iterable values) { + ensureSwitchTypesIsMutable(); + super.addAll(values, switchTypes_); + onChanged(); + return this; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder clearSwitchTypes() { + switchTypes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetSplitOrMergeEnabledRequest) } static { - defaultInstance = new NormalizeRequest(true); + defaultInstance = new SetSplitOrMergeEnabledRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeRequest) + // @@protoc_insertion_point(class_scope:hbase.pb.SetSplitOrMergeEnabledRequest) } - public interface NormalizeResponseOrBuilder + public interface SetSplitOrMergeEnabledResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bool normalizer_ran = 1; + // repeated bool prev_value = 1; /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - boolean hasNormalizerRan(); + java.util.List getPrevValueList(); /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - boolean getNormalizerRan(); + int getPrevValueCount(); + /** + * repeated bool prev_value = 1; + */ + boolean getPrevValue(int index); } /** - * Protobuf type {@code hbase.pb.NormalizeResponse} + * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledResponse} */ - public static final class NormalizeResponse extends + public static final class SetSplitOrMergeEnabledResponse extends com.google.protobuf.GeneratedMessage - implements NormalizeResponseOrBuilder { - // Use NormalizeResponse.newBuilder() to construct. - private NormalizeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements SetSplitOrMergeEnabledResponseOrBuilder { + // Use SetSplitOrMergeEnabledResponse.newBuilder() to construct. 
+ private SetSplitOrMergeEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private NormalizeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private SetSplitOrMergeEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final NormalizeResponse defaultInstance; - public static NormalizeResponse getDefaultInstance() { + private static final SetSplitOrMergeEnabledResponse defaultInstance; + public static SetSplitOrMergeEnabledResponse getDefaultInstance() { return defaultInstance; } - public NormalizeResponse getDefaultInstanceForType() { + public SetSplitOrMergeEnabledResponse getDefaultInstanceForType() { return defaultInstance; } @@ -29143,7 +29603,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private NormalizeResponse( + private SetSplitOrMergeEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -29167,8 +29627,24 @@ public final class MasterProtos { break; } case 8: { - bitField0_ |= 0x00000001; - normalizerRan_ = input.readBool(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + prevValue_.add(input.readBool()); + break; + } + case 10: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { + prevValue_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + while (input.getBytesUntilLimit() > 0) { + prevValue_.add(input.readBool()); + } + input.popLimit(limit); break; } } @@ -29179,66 +29655,71 @@ public final class MasterProtos { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = java.util.Collections.unmodifiableList(prevValue_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NormalizeResponse 
parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetSplitOrMergeEnabledResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new NormalizeResponse(input, extensionRegistry); + return new SetSplitOrMergeEnabledResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } - private int bitField0_; - // required bool normalizer_ran = 1; - public static final int NORMALIZER_RAN_FIELD_NUMBER = 1; - private boolean normalizerRan_; + // repeated bool prev_value = 1; + public static final int PREV_VALUE_FIELD_NUMBER = 1; + private java.util.List prevValue_; /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean hasNormalizerRan() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public java.util.List + getPrevValueList() { + return prevValue_; } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean getNormalizerRan() { - return normalizerRan_; + public int getPrevValueCount() { + return prevValue_.size(); + } + /** + * repeated bool prev_value = 1; + */ + public boolean getPrevValue(int index) { + return prevValue_.get(index); } private void initFields() { - normalizerRan_ = false; + prevValue_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasNormalizerRan()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -29246,8 +29727,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, normalizerRan_); + for (int i = 0; i < prevValue_.size(); i++) { + output.writeBool(1, prevValue_.get(i)); } getUnknownFields().writeTo(output); } @@ -29258,9 +29739,11 @@ public final class MasterProtos { if (size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, normalizerRan_); + { + int dataSize = 0; + dataSize = 1 * getPrevValueList().size(); + size += dataSize; + size += 1 * getPrevValueList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -29279,17 +29762,14 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse) obj; boolean result = true; - result = result && (hasNormalizerRan() == other.hasNormalizerRan()); - if (hasNormalizerRan()) { - result = result 
&& (getNormalizerRan() - == other.getNormalizerRan()); - } + result = result && getPrevValueList() + .equals(other.getPrevValueList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -29303,62 +29783,62 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasNormalizerRan()) { - hash = (37 * hash) + NORMALIZER_RAN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getNormalizerRan()); + if (getPrevValueCount() > 0) { + hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getPrevValueList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseDelimitedFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -29367,7 +29847,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -29379,24 +29859,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.NormalizeResponse} + * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -29416,7 +29896,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); - normalizerRan_ = false; + prevValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -29427,57 +29907,59 @@ public final class MasterProtos 
{ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse(this); int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = java.util.Collections.unmodifiableList(prevValue_); + bitField0_ = (bitField0_ & ~0x00000001); } - result.normalizerRan_ = normalizerRan_; - result.bitField0_ = to_bitField0_; + result.prevValue_ = prevValue_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance()) return this; - if (other.hasNormalizerRan()) { - setNormalizerRan(other.getNormalizerRan()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance()) return this; + if (!other.prevValue_.isEmpty()) { + if (prevValue_.isEmpty()) { + prevValue_ = other.prevValue_; + 
bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePrevValueIsMutable(); + prevValue_.addAll(other.prevValue_); + } + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasNormalizerRan()) { - - return false; - } return true; } @@ -29485,11 +29967,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -29500,82 +29982,115 @@ public final class MasterProtos { } private int bitField0_; - // required bool normalizer_ran = 1; - private boolean normalizerRan_ ; + // repeated bool prev_value = 1; + private java.util.List prevValue_ = java.util.Collections.emptyList(); + private void ensurePrevValueIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = new java.util.ArrayList(prevValue_); + bitField0_ |= 0x00000001; + } + } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean hasNormalizerRan() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public java.util.List + getPrevValueList() { + return java.util.Collections.unmodifiableList(prevValue_); } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean getNormalizerRan() { - return normalizerRan_; + public int getPrevValueCount() { + return prevValue_.size(); } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public Builder setNormalizerRan(boolean value) { - bitField0_ |= 0x00000001; - normalizerRan_ = value; + public boolean getPrevValue(int index) { + return prevValue_.get(index); + } + /** + * repeated bool prev_value = 1; + */ + public Builder setPrevValue( + int index, boolean value) { + ensurePrevValueIsMutable(); + prevValue_.set(index, value); onChanged(); return this; } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public Builder clearNormalizerRan() { + public Builder addPrevValue(boolean value) { + ensurePrevValueIsMutable(); + prevValue_.add(value); + onChanged(); + return this; + } + /** + * repeated bool prev_value = 1; + */ + public Builder addAllPrevValue( + java.lang.Iterable values) { + ensurePrevValueIsMutable(); + super.addAll(values, prevValue_); + onChanged(); + return this; + } + /** + * repeated bool prev_value = 1; + */ + public Builder clearPrevValue() { + prevValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); - normalizerRan_ = false; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.SetSplitOrMergeEnabledResponse) } static { - defaultInstance = new NormalizeResponse(true); + defaultInstance = new SetSplitOrMergeEnabledResponse(true); defaultInstance.initFields(); 
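The hunk above swaps the old required normalizer_ran boolean for a repeated prev_value list, and the embedded javadoc fixes the new wire schema exactly. As a reading aid, here is the Master.proto message this regenerated code corresponds to; the message name, field label, field name, and field number are taken verbatim from the javadoc comments above, while the layout itself is only an assumed sketch:

    message SetSplitOrMergeEnabledResponse {
      // One previous-state entry per switch type named in the request;
      // that pairing is inferred from the repeated label rather than
      // stated anywhere in this hunk.
      repeated bool prev_value = 1;
    }
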
} - // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeResponse) + // @@protoc_insertion_point(class_scope:hbase.pb.SetSplitOrMergeEnabledResponse) } - public interface SetNormalizerRunningRequestOrBuilder + public interface IsSplitOrMergeEnabledRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bool on = 1; + // required .hbase.pb.MasterSwitchType switch_type = 1; /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - boolean hasOn(); + boolean hasSwitchType(); /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - boolean getOn(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType(); } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledRequest} */ - public static final class SetNormalizerRunningRequest extends + public static final class IsSplitOrMergeEnabledRequest extends com.google.protobuf.GeneratedMessage - implements SetNormalizerRunningRequestOrBuilder { - // Use SetNormalizerRunningRequest.newBuilder() to construct. - private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements IsSplitOrMergeEnabledRequestOrBuilder { + // Use IsSplitOrMergeEnabledRequest.newBuilder() to construct. + private IsSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private SetNormalizerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private IsSplitOrMergeEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final SetNormalizerRunningRequest defaultInstance; - public static SetNormalizerRunningRequest getDefaultInstance() { + private static final IsSplitOrMergeEnabledRequest defaultInstance; + public static IsSplitOrMergeEnabledRequest getDefaultInstance() { return defaultInstance; } - public SetNormalizerRunningRequest getDefaultInstanceForType() { + public IsSplitOrMergeEnabledRequest getDefaultInstanceForType() { return defaultInstance; } @@ -29585,7 +30100,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private SetNormalizerRunningRequest( + private IsSplitOrMergeEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -29609,8 +30124,14 @@ public final class MasterProtos { break; } case 8: { - bitField0_ |= 0x00000001; - on_ = input.readBool(); + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + switchType_ = value; + } break; } } @@ -29627,57 +30148,57 @@ public final class MasterProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetNormalizerRunningRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsSplitOrMergeEnabledRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new SetNormalizerRunningRequest(input, extensionRegistry); + return new IsSplitOrMergeEnabledRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } private int bitField0_; - // required bool on = 1; - public static final int ON_FIELD_NUMBER = 1; - private boolean on_; + // required .hbase.pb.MasterSwitchType switch_type = 1; + public static final int SWITCH_TYPE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType switchType_; /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean hasOn() { + public boolean hasSwitchType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean getOn() { - return on_; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType() { + return switchType_; } private void initFields() { - on_ = false; + switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasOn()) { + if (!hasSwitchType()) { memoizedIsInitialized = 0; return false; } @@ -29689,7 +30210,7 @@ public final class MasterProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, on_); + output.writeEnum(1, switchType_.getNumber()); } getUnknownFields().writeTo(output); } @@ -29702,7 +30223,7 @@ public final class MasterProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, on_); + .computeEnumSize(1, switchType_.getNumber()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -29721,16 +30242,16 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)) { + if 
(!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest) obj; boolean result = true; - result = result && (hasOn() == other.hasOn()); - if (hasOn()) { - result = result && (getOn() - == other.getOn()); + result = result && (hasSwitchType() == other.hasSwitchType()); + if (hasSwitchType()) { + result = result && + (getSwitchType() == other.getSwitchType()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -29745,62 +30266,62 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasOn()) { - hash = (37 * hash) + ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOn()); + if (hasSwitchType()) { + hash = (37 * hash) + SWITCH_TYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getSwitchType()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -29809,7 +30330,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -29821,24 +30342,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -29858,7 +30379,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); - on_ = false; + switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -29869,54 +30390,54 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.on_ = on_; + result.switchType_ = switchType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest)other); } else { 
super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance()) return this; - if (other.hasOn()) { - setOn(other.getOn()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.getDefaultInstance()) return this; + if (other.hasSwitchType()) { + setSwitchType(other.getSwitchType()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasOn()) { + if (!hasSwitchType()) { return false; } @@ -29927,11 +30448,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -29942,82 +30463,85 @@ public final class MasterProtos { } private int bitField0_; - // required bool on = 1; - private boolean on_ ; + // required .hbase.pb.MasterSwitchType switch_type = 1; + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean hasOn() { + public boolean hasSwitchType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean getOn() { - return on_; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType() { + return switchType_; } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public Builder setOn(boolean value) { + public Builder setSwitchType(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value) { + if (value == null) { + throw new NullPointerException(); + } bitField0_ |= 0x00000001; - on_ = value; + switchType_ = value; onChanged(); return this; } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public Builder clearOn() { + public Builder clearSwitchType() { bitField0_ = (bitField0_ & ~0x00000001); - on_ = false; + switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningRequest) + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSplitOrMergeEnabledRequest) } static { - defaultInstance = new SetNormalizerRunningRequest(true); + defaultInstance = new 
IsSplitOrMergeEnabledRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningRequest) + // @@protoc_insertion_point(class_scope:hbase.pb.IsSplitOrMergeEnabledRequest) } - public interface SetNormalizerRunningResponseOrBuilder + public interface IsSplitOrMergeEnabledResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional bool prev_normalizer_value = 1; + // required bool enabled = 1; /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - boolean hasPrevNormalizerValue(); + boolean hasEnabled(); /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - boolean getPrevNormalizerValue(); + boolean getEnabled(); } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledResponse} */ - public static final class SetNormalizerRunningResponse extends + public static final class IsSplitOrMergeEnabledResponse extends com.google.protobuf.GeneratedMessage - implements SetNormalizerRunningResponseOrBuilder { - // Use SetNormalizerRunningResponse.newBuilder() to construct. - private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements IsSplitOrMergeEnabledResponseOrBuilder { + // Use IsSplitOrMergeEnabledResponse.newBuilder() to construct. + private IsSplitOrMergeEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private SetNormalizerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private IsSplitOrMergeEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final SetNormalizerRunningResponse defaultInstance; - public static SetNormalizerRunningResponse getDefaultInstance() { + private static final IsSplitOrMergeEnabledResponse defaultInstance; + public static IsSplitOrMergeEnabledResponse getDefaultInstance() { return defaultInstance; } - public SetNormalizerRunningResponse getDefaultInstanceForType() { + public IsSplitOrMergeEnabledResponse getDefaultInstanceForType() { return defaultInstance; } @@ -30027,7 +30551,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private SetNormalizerRunningResponse( + private IsSplitOrMergeEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -30052,7 +30576,7 @@ public final class MasterProtos { } case 8: { bitField0_ |= 0x00000001; - prevNormalizerValue_ = input.readBool(); + enabled_ = input.readBool(); break; } } @@ -30069,56 +30593,60 @@ public final class MasterProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetNormalizerRunningResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsSplitOrMergeEnabledResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new SetNormalizerRunningResponse(input, extensionRegistry); + return new IsSplitOrMergeEnabledResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } private int bitField0_; - // optional bool prev_normalizer_value = 1; - public static final int PREV_NORMALIZER_VALUE_FIELD_NUMBER = 1; - private boolean prevNormalizerValue_; + // required bool enabled = 1; + public static final int ENABLED_FIELD_NUMBER = 1; + private boolean enabled_; /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean hasPrevNormalizerValue() { + public boolean hasEnabled() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean getPrevNormalizerValue() { - return prevNormalizerValue_; + public boolean getEnabled() { + return enabled_; } private void initFields() { - prevNormalizerValue_ = false; + enabled_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasEnabled()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -30127,7 +30655,7 @@ public final class MasterProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, prevNormalizerValue_); + output.writeBool(1, enabled_); } getUnknownFields().writeTo(output); } @@ -30140,7 +30668,7 @@ public final class MasterProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, prevNormalizerValue_); + .computeBoolSize(1, enabled_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -30159,16 +30687,16 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) obj; + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse) obj; boolean result = true; - result = result && (hasPrevNormalizerValue() == other.hasPrevNormalizerValue()); - if (hasPrevNormalizerValue()) { - result = result && (getPrevNormalizerValue() - == other.getPrevNormalizerValue()); + result = result && (hasEnabled() == other.hasEnabled()); + if (hasEnabled()) { + result = result && (getEnabled() + == other.getEnabled()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -30183,62 +30711,62 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPrevNormalizerValue()) { - hash = (37 * hash) + PREV_NORMALIZER_VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevNormalizerValue()); + if (hasEnabled()) { + hash = (37 * hash) + ENABLED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getEnabled()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(java.io.InputStream input) + 
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -30247,7 +30775,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -30259,24 +30787,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.Builder.class); } - 
// Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -30296,7 +30824,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); - prevNormalizerValue_ = false; + enabled_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -30307,53 +30835,57 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.prevNormalizerValue_ = prevNormalizerValue_; + result.enabled_ = enabled_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance()) return this; - if (other.hasPrevNormalizerValue()) { - setPrevNormalizerValue(other.getPrevNormalizerValue()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance()) return this; + if (other.hasEnabled()) { + setEnabled(other.getEnabled()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasEnabled()) { + + return false; + } return true; } @@ -30361,11 +30893,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -30376,72 +30908,72 @@ public final class MasterProtos { } private int bitField0_; - // optional bool prev_normalizer_value = 1; - private boolean prevNormalizerValue_ ; + // required bool enabled = 1; + private boolean enabled_ ; /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean hasPrevNormalizerValue() { + public boolean hasEnabled() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean getPrevNormalizerValue() { - return prevNormalizerValue_; + public boolean getEnabled() { + return enabled_; } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public Builder setPrevNormalizerValue(boolean value) { + public Builder setEnabled(boolean value) { bitField0_ |= 0x00000001; - prevNormalizerValue_ = value; + enabled_ = value; onChanged(); return this; } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public Builder clearPrevNormalizerValue() { + public Builder clearEnabled() { bitField0_ = (bitField0_ & ~0x00000001); - prevNormalizerValue_ = false; + enabled_ = false; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSplitOrMergeEnabledResponse) } static { - defaultInstance = new SetNormalizerRunningResponse(true); + defaultInstance = new IsSplitOrMergeEnabledResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningResponse) + // @@protoc_insertion_point(class_scope:hbase.pb.IsSplitOrMergeEnabledResponse) } - public interface IsNormalizerEnabledRequestOrBuilder + public interface NormalizeRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** - * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} + * Protobuf type {@code 
hbase.pb.NormalizeRequest} */ - public static final class IsNormalizerEnabledRequest extends + public static final class NormalizeRequest extends com.google.protobuf.GeneratedMessage - implements IsNormalizerEnabledRequestOrBuilder { - // Use IsNormalizerEnabledRequest.newBuilder() to construct. - private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements NormalizeRequestOrBuilder { + // Use NormalizeRequest.newBuilder() to construct. + private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private IsNormalizerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private NormalizeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final IsNormalizerEnabledRequest defaultInstance; - public static IsNormalizerEnabledRequest getDefaultInstance() { + private static final NormalizeRequest defaultInstance; + public static NormalizeRequest getDefaultInstance() { return defaultInstance; } - public IsNormalizerEnabledRequest getDefaultInstanceForType() { + public NormalizeRequest getDefaultInstanceForType() { return defaultInstance; } @@ -30451,7 +30983,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private IsNormalizerEnabledRequest( + private NormalizeRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -30487,28 +31019,28 @@ public final class MasterProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsNormalizerEnabledRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NormalizeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new IsNormalizerEnabledRequest(input, extensionRegistry); + return new NormalizeRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ 
-30552,10 +31084,10 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) obj; boolean result = true; result = result && @@ -30576,53 +31108,53 @@ public final class MasterProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -30631,7 +31163,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -30643,24 +31175,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} + * Protobuf type {@code hbase.pb.NormalizeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -30689,38 +31221,38 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -30733,11 +31265,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) e.getUnfinishedMessage(); throw e; } finally { if 
(parsedMessage != null) { @@ -30747,49 +31279,49 @@ public final class MasterProtos { return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.IsNormalizerEnabledRequest) + // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeRequest) } static { - defaultInstance = new IsNormalizerEnabledRequest(true); + defaultInstance = new NormalizeRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledRequest) + // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeRequest) } - public interface IsNormalizerEnabledResponseOrBuilder + public interface NormalizeResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bool enabled = 1; + // required bool normalizer_ran = 1; /** - * required bool enabled = 1; + * required bool normalizer_ran = 1; */ - boolean hasEnabled(); + boolean hasNormalizerRan(); /** - * required bool enabled = 1; + * required bool normalizer_ran = 1; */ - boolean getEnabled(); + boolean getNormalizerRan(); } /** - * Protobuf type {@code hbase.pb.IsNormalizerEnabledResponse} + * Protobuf type {@code hbase.pb.NormalizeResponse} */ - public static final class IsNormalizerEnabledResponse extends + public static final class NormalizeResponse extends com.google.protobuf.GeneratedMessage - implements IsNormalizerEnabledResponseOrBuilder { - // Use IsNormalizerEnabledResponse.newBuilder() to construct. - private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements NormalizeResponseOrBuilder { + // Use NormalizeResponse.newBuilder() to construct. + private NormalizeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private IsNormalizerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private NormalizeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final IsNormalizerEnabledResponse defaultInstance; - public static IsNormalizerEnabledResponse getDefaultInstance() { + private static final NormalizeResponse defaultInstance; + public static NormalizeResponse getDefaultInstance() { return defaultInstance; } - public IsNormalizerEnabledResponse getDefaultInstanceForType() { + public NormalizeResponse getDefaultInstanceForType() { return defaultInstance; } @@ -30799,7 +31331,1663 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private IsNormalizerEnabledResponse( + private NormalizeResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + normalizerRan_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + 
e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NormalizeResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NormalizeResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bool normalizer_ran = 1; + public static final int NORMALIZER_RAN_FIELD_NUMBER = 1; + private boolean normalizerRan_; + /** + * required bool normalizer_ran = 1; + */ + public boolean hasNormalizerRan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool normalizer_ran = 1; + */ + public boolean getNormalizerRan() { + return normalizerRan_; + } + + private void initFields() { + normalizerRan_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasNormalizerRan()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, normalizerRan_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, normalizerRan_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) obj; + + boolean result = true; + result = result && (hasNormalizerRan() == other.hasNormalizerRan()); + if (hasNormalizerRan()) { + result = result && (getNormalizerRan() + == other.getNormalizerRan()); + } + 
result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasNormalizerRan()) { + hash = (37 * hash) + NORMALIZER_RAN_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getNormalizerRan()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( 
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.NormalizeResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + normalizerRan_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.normalizerRan_ = normalizerRan_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance()) return this; + if (other.hasNormalizerRan()) { + setNormalizerRan(other.getNormalizerRan()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasNormalizerRan()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool normalizer_ran = 1; + private boolean normalizerRan_ ; + /** + * required bool normalizer_ran = 1; + */ + public boolean hasNormalizerRan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool normalizer_ran = 1; + */ + public boolean getNormalizerRan() { + return normalizerRan_; + } + /** + * required bool normalizer_ran = 1; + */ + public Builder setNormalizerRan(boolean value) { + bitField0_ |= 0x00000001; + normalizerRan_ = value; + onChanged(); + return this; + } + /** + * required bool normalizer_ran = 1; + */ + public Builder clearNormalizerRan() { + bitField0_ = (bitField0_ & ~0x00000001); + normalizerRan_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeResponse) + } + + static { + defaultInstance = new NormalizeResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeResponse) + } + + public interface SetNormalizerRunningRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool on = 1; + /** + * required bool on = 1; + */ + boolean hasOn(); + /** + * required bool on = 1; + */ + boolean getOn(); + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + */ + public static final class SetNormalizerRunningRequest extends + com.google.protobuf.GeneratedMessage + implements SetNormalizerRunningRequestOrBuilder { + // Use SetNormalizerRunningRequest.newBuilder() to construct. 
+ private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SetNormalizerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SetNormalizerRunningRequest defaultInstance; + public static SetNormalizerRunningRequest getDefaultInstance() { + return defaultInstance; + } + + public SetNormalizerRunningRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetNormalizerRunningRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + on_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetNormalizerRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetNormalizerRunningRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bool on = 1; + public static final int ON_FIELD_NUMBER = 1; + private boolean on_; + /** + * required bool on = 1; + */ + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool on = 1; + */ + public boolean getOn() { + return on_; + } + + private void initFields() { + on_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != 
-1) return isInitialized == 1; + + if (!hasOn()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, on_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, on_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) obj; + + boolean result = true; + result = result && (hasOn() == other.hasOn()); + if (hasOn()) { + result = result && (getOn() + == other.getOn()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasOn()) { + hash = (37 * hash) + ON_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getOn()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + on_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.on_ = on_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance()) return this; + if (other.hasOn()) { + setOn(other.getOn()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasOn()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool on = 1; + private boolean on_ ; + /** + * required bool on = 1; + */ + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool on = 1; + */ + public boolean getOn() { + return on_; + } + /** + * required bool on = 1; + */ + public Builder setOn(boolean value) { + bitField0_ |= 0x00000001; + on_ = value; + onChanged(); + return this; + } + /** + * required bool on = 1; + */ + public Builder clearOn() { + bitField0_ = (bitField0_ & ~0x00000001); + on_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningRequest) + } + + static { + defaultInstance = new SetNormalizerRunningRequest(true); + defaultInstance.initFields(); + } + 
+ // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningRequest) + } + + public interface SetNormalizerRunningResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bool prev_normalizer_value = 1; + /** + * optional bool prev_normalizer_value = 1; + */ + boolean hasPrevNormalizerValue(); + /** + * optional bool prev_normalizer_value = 1; + */ + boolean getPrevNormalizerValue(); + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + */ + public static final class SetNormalizerRunningResponse extends + com.google.protobuf.GeneratedMessage + implements SetNormalizerRunningResponseOrBuilder { + // Use SetNormalizerRunningResponse.newBuilder() to construct. + private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SetNormalizerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SetNormalizerRunningResponse defaultInstance; + public static SetNormalizerRunningResponse getDefaultInstance() { + return defaultInstance; + } + + public SetNormalizerRunningResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetNormalizerRunningResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevNormalizerValue_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetNormalizerRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new SetNormalizerRunningResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional bool prev_normalizer_value = 1; + public static final int PREV_NORMALIZER_VALUE_FIELD_NUMBER = 1; + private boolean prevNormalizerValue_; + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean hasPrevNormalizerValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean getPrevNormalizerValue() { + return prevNormalizerValue_; + } + + private void initFields() { + prevNormalizerValue_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, prevNormalizerValue_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, prevNormalizerValue_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) obj; + + boolean result = true; + result = result && (hasPrevNormalizerValue() == other.hasPrevNormalizerValue()); + if (hasPrevNormalizerValue()) { + result = result && (getPrevNormalizerValue() + == other.getPrevNormalizerValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPrevNormalizerValue()) { + hash = (37 * hash) + PREV_NORMALIZER_VALUE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getPrevNormalizerValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + prevNormalizerValue_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prevNormalizerValue_ = prevNormalizerValue_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance()) return this; + if (other.hasPrevNormalizerValue()) { + setPrevNormalizerValue(other.getPrevNormalizerValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional bool prev_normalizer_value = 1; + private boolean prevNormalizerValue_ ; + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean hasPrevNormalizerValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean getPrevNormalizerValue() { + return prevNormalizerValue_; + } + /** + * optional bool prev_normalizer_value = 1; + */ + public Builder setPrevNormalizerValue(boolean value) { + bitField0_ |= 0x00000001; + prevNormalizerValue_ = value; + onChanged(); + return this; + } + /** + * optional bool prev_normalizer_value = 1; + */ + public Builder clearPrevNormalizerValue() { + bitField0_ = (bitField0_ & ~0x00000001); + prevNormalizerValue_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningResponse) + } + + static { + defaultInstance = new SetNormalizerRunningResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningResponse) + } + + public interface IsNormalizerEnabledRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} + */ + public static final class IsNormalizerEnabledRequest extends + com.google.protobuf.GeneratedMessage + implements IsNormalizerEnabledRequestOrBuilder { + // Use IsNormalizerEnabledRequest.newBuilder() to construct. 
+    private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private IsNormalizerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final IsNormalizerEnabledRequest defaultInstance;
+    public static IsNormalizerEnabledRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public IsNormalizerEnabledRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private IsNormalizerEnabledRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<IsNormalizerEnabledRequest> PARSER =
+        new com.google.protobuf.AbstractParser<IsNormalizerEnabledRequest>() {
+      public IsNormalizerEnabledRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new IsNormalizerEnabledRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<IsNormalizerEnabledRequest> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.IsNormalizerEnabledRequest)
+    }
+
+    static {
+      defaultInstance = new IsNormalizerEnabledRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledRequest)
+  }
+
+  public interface IsNormalizerEnabledResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required bool enabled = 1;
+    /**
+     * <code>required bool enabled = 1;</code>
+     */
+    boolean hasEnabled();
+    /**
+     * <code>required bool enabled = 1;</code>
+     */
+    boolean getEnabled();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.IsNormalizerEnabledResponse}
+   */
+  public static final class IsNormalizerEnabledResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements IsNormalizerEnabledResponseOrBuilder {
+    // Use IsNormalizerEnabledResponse.newBuilder() to construct.
+    private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private IsNormalizerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final IsNormalizerEnabledResponse defaultInstance;
+    public static IsNormalizerEnabledResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public IsNormalizerEnabledResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private IsNormalizerEnabledResponse(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -57338,6 +59526,33 @@ public final class MasterProtos {
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done);
 
       /**
+       * <code>rpc SetSplitOrMergeEnabled(.hbase.pb.SetSplitOrMergeEnabledRequest) returns (.hbase.pb.SetSplitOrMergeEnabledResponse);</code>
+       *
+       * <pre>
+       **
+       * Turn the split or merge switch on or off.
+       * If synchronous is true, it waits for any outstanding split or merge operation to finish before returning.
+       * </pre>
+       */
+      public abstract void setSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse> done);
+
+      /**
+       * <code>rpc IsSplitOrMergeEnabled(.hbase.pb.IsSplitOrMergeEnabledRequest) returns (.hbase.pb.IsSplitOrMergeEnabledResponse);</code>
+       *
+       * <pre>
+       **
+       * Query whether the split or merge switch is on or off.
+       * </pre>
+       */
+      public abstract void isSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse> done);
+
       /**
        * <code>rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse);</code>
        *
        * <pre>
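
A minimal client-side usage sketch (not part of the patch, and assuming a running cluster with standard client configuration): only setSplitOrMergeEnabled, isSplitOrMergeEnabled and MasterSwitchType are introduced by this change; the connection setup is ordinary HBase client code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Admin.MasterSwitchType;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SplitOrMergeSwitchExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Turn both switches off; synchronous=true asks the master to wait
      // for any outstanding split/merge before the switch takes effect.
      boolean[] previous = admin.setSplitOrMergeEnabled(false, true,
          MasterSwitchType.SPLIT, MasterSwitchType.MERGE);
      System.out.println("split was " + previous[0] + ", merge was " + previous[1]);

      // Query the current state of one switch.
      boolean splitEnabled = admin.isSplitOrMergeEnabled(MasterSwitchType.SPLIT);
      System.out.println("split enabled now: " + splitEnabled);

      // Re-enable both switches.
      admin.setSplitOrMergeEnabled(true, false,
          MasterSwitchType.SPLIT, MasterSwitchType.MERGE);
    }
  }
}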
@@ -57919,6 +60134,22 @@ public final class MasterProtos {
         }
 
         @java.lang.Override
+        public  void setSplitOrMergeEnabled(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse> done) {
+          impl.setSplitOrMergeEnabled(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void isSplitOrMergeEnabled(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse> done) {
+          impl.isSplitOrMergeEnabled(controller, request, done);
+        }
+
+        @java.lang.Override
         public  void normalize(
             com.google.protobuf.RpcController controller,
             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
@@ -58237,66 +60468,70 @@ public final class MasterProtos {
             case 23:
               return impl.isBalancerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)request);
             case 24:
-              return impl.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request);
+              return impl.setSplitOrMergeEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest)request);
             case 25:
-              return impl.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request);
+              return impl.isSplitOrMergeEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest)request);
             case 26:
-              return impl.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request);
+              return impl.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request);
             case 27:
-              return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request);
+              return impl.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request);
             case 28:
-              return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request);
+              return impl.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request);
             case 29:
-              return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request);
+              return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request);
             case 30:
-              return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
+              return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request);
             case 31:
-              return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request);
+              return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request);
             case 32:
-              return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request);
+              return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
             case 33:
-              return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request);
+              return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request);
             case 34:
-              return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request);
+              return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request);
             case 35:
-              return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request);
+              return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request);
             case 36:
-              return impl.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request);
+              return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request);
             case 37:
-              return impl.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
+              return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request);
             case 38:
-              return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
+              return impl.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request);
             case 39:
-              return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request);
+              return impl.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
             case 40:
-              return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request);
+              return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
             case 41:
-              return impl.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request);
+              return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request);
             case 42:
-              return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request);
+              return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request);
             case 43:
-              return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request);
+              return impl.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request);
             case 44:
-              return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request);
+              return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request);
             case 45:
-              return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request);
+              return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request);
             case 46:
-              return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request);
+              return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request);
             case 47:
-              return impl.getTableState(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest)request);
+              return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request);
             case 48:
-              return impl.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request);
+              return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request);
             case 49:
-              return impl.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request);
+              return impl.getTableState(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest)request);
             case 50:
-              return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request);
+              return impl.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request);
             case 51:
-              return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request);
+              return impl.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request);
             case 52:
-              return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
+              return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request);
             case 53:
-              return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
+              return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request);
             case 54:
+              return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
+            case 55:
+              return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
+            case 56:
               return impl.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request);
             default:
               throw new java.lang.AssertionError("Can't get here.");
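
The renumbered case arms above follow mechanically from protobuf's positional dispatch: the generated callMethod routes on each rpc's declaration index in Master.proto, so inserting the two switch rpcs right after IsBalancerEnabled (index 23) pushes every later method up by two. A toy sketch of the same pattern (names illustrative, not from the generated file):

public class PositionalDispatchSketch {
  // Dispatch on a method index, as the generated switch above does on
  // method.getIndex(); inserting two entries shifts everything after them.
  static String dispatch(int methodIndex) {
    switch (methodIndex) {
      case 0: return "isBalancerEnabled";       // index 23 in MasterProtos
      case 1: return "setSplitOrMergeEnabled";  // newly inserted
      case 2: return "isSplitOrMergeEnabled";   // newly inserted
      case 3: return "normalize";               // was immediately after isBalancerEnabled
      default: throw new AssertionError("Can't get here.");
    }
  }

  public static void main(String[] args) {
    // "normalize" now lives two slots later than before the insertion.
    System.out.println(dispatch(3));
  }
}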
@@ -58361,66 +60596,70 @@ public final class MasterProtos {
             case 23:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
             case 24:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.getDefaultInstance();
             case 25:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.getDefaultInstance();
             case 26:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
             case 27:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
             case 28:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
             case 29:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
             case 30:
-              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
             case 31:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
             case 32:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
             case 33:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
             case 34:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
             case 35:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
             case 36:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
             case 37:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
             case 38:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
             case 39:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
             case 40:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
             case 41:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
             case 42:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
             case 43:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
             case 44:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
             case 45:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
             case 46:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
             case 47:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
             case 48:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
             case 49:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
             case 50:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
             case 51:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
             case 52:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
             case 53:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
             case 54:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+            case 55:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+            case 56:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
             default:
               throw new java.lang.AssertionError("Can't get here.");
@@ -58485,66 +60724,70 @@ public final class MasterProtos {
             case 23:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
             case 24:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance();
             case 25:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance();
             case 26:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
             case 27:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
             case 28:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
             case 29:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
             case 30:
-              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
             case 31:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
             case 32:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
             case 33:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
             case 34:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
             case 35:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
             case 36:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
             case 37:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
             case 38:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
             case 39:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
             case 40:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
             case 41:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
             case 42:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
             case 43:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
             case 44:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
             case 45:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
             case 46:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
             case 47:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
             case 48:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
             case 49:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
             case 50:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
             case 51:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
             case 52:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
             case 53:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
             case 54:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+            case 55:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+            case 56:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
             default:
               throw new java.lang.AssertionError("Can't get here.");
@@ -58857,6 +61100,33 @@ public final class MasterProtos {
         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done);
 
     /**
+     * <code>rpc SetSplitOrMergeEnabled(.hbase.pb.SetSplitOrMergeEnabledRequest) returns (.hbase.pb.SetSplitOrMergeEnabledResponse);</code>
+     *
+     * <pre>
+     **
+     * Turn the split or merge switch on or off.
+     * If synchronous is true, it waits for any outstanding split or merge operation to finish before returning.
+     * </pre>
+     */
+    public abstract void setSplitOrMergeEnabled(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse> done);
+
+    /**
+     * <code>rpc IsSplitOrMergeEnabled(.hbase.pb.IsSplitOrMergeEnabledRequest) returns (.hbase.pb.IsSplitOrMergeEnabledResponse);</code>
+     *
+     * <pre>
+     **
+     * Query whether the split or merge switch is on or off.
+     * </pre>
+     */
+    public abstract void isSplitOrMergeEnabled(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse> done);
+
     /**
      * <code>rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse);</code>
      *
      * <pre>
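
A hedged server-side sketch of the contract these abstract methods define: the implementation hands its result to the supplied callback rather than returning it. The real patch routes this through MasterRpcServices backed by a ZooKeeper tracker (per the file list); here an in-memory flag stands in, and the response's `enabled` field name is an assumption by analogy with IsNormalizerEnabledResponse earlier in this file.

import java.util.concurrent.atomic.AtomicBoolean;

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;

import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;

// Sketch only: extends the generated abstract service and fills in one
// method; a real master implements every rpc, not just this one.
public abstract class SplitOrMergeSwitchServiceSketch
    extends MasterProtos.MasterService {

  // Hypothetical in-memory stand-in for the patch's ZooKeeper-backed state.
  private final AtomicBoolean splitOrMergeEnabled = new AtomicBoolean(true);

  @Override
  public void isSplitOrMergeEnabled(
      RpcController controller,
      MasterProtos.IsSplitOrMergeEnabledRequest request,
      RpcCallback<MasterProtos.IsSplitOrMergeEnabledResponse> done) {
    MasterProtos.IsSplitOrMergeEnabledResponse response =
        MasterProtos.IsSplitOrMergeEnabledResponse.newBuilder()
            .setEnabled(splitOrMergeEnabled.get()) // field name assumed, see note above
            .build();
    done.run(response); // completion is signalled via the callback, not a return value
  }
}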
@@ -59383,156 +61653,166 @@ public final class MasterProtos {
               done));
           return;
         case 24:
+          this.setSplitOrMergeEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse>specializeCallback(
+              done));
+          return;
+        case 25:
+          this.isSplitOrMergeEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse>specializeCallback(
+              done));
+          return;
+        case 26:
           this.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse>specializeCallback(
               done));
           return;
-        case 25:
+        case 27:
           this.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse>specializeCallback(
               done));
           return;
-        case 26:
+        case 28:
           this.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse>specializeCallback(
               done));
           return;
-        case 27:
+        case 29:
           this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse>specializeCallback(
               done));
           return;
-        case 28:
+        case 30:
           this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse>specializeCallback(
               done));
           return;
-        case 29:
+        case 31:
           this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse>specializeCallback(
               done));
           return;
-        case 30:
+        case 32:
           this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
               done));
           return;
-        case 31:
+        case 33:
           this.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse>specializeCallback(
               done));
           return;
-        case 32:
+        case 34:
           this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse>specializeCallback(
               done));
           return;
-        case 33:
+        case 35:
           this.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse>specializeCallback(
               done));
           return;
-        case 34:
+        case 36:
           this.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse>specializeCallback(
               done));
           return;
-        case 35:
+        case 37:
           this.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse>specializeCallback(
               done));
           return;
-        case 36:
+        case 38:
           this.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse>specializeCallback(
               done));
           return;
-        case 37:
+        case 39:
           this.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback(
               done));
           return;
-        case 38:
+        case 40:
           this.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback(
               done));
           return;
-        case 39:
+        case 41:
           this.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse>specializeCallback(
               done));
           return;
-        case 40:
+        case 42:
           this.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse>specializeCallback(
               done));
           return;
-        case 41:
+        case 43:
           this.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse>specializeCallback(
               done));
           return;
-        case 42:
+        case 44:
           this.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse>specializeCallback(
               done));
           return;
-        case 43:
+        case 45:
           this.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse>specializeCallback(
               done));
           return;
-        case 44:
+        case 46:
           this.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse>specializeCallback(
               done));
           return;
-        case 45:
+        case 47:
           this.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse>specializeCallback(
               done));
           return;
-        case 46:
+        case 48:
           this.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse>specializeCallback(
               done));
           return;
-        case 47:
+        case 49:
           this.getTableState(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse>specializeCallback(
               done));
           return;
-        case 48:
+        case 50:
           this.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse>specializeCallback(
               done));
           return;
-        case 49:
+        case 51:
           this.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse>specializeCallback(
               done));
           return;
-        case 50:
+        case 52:
           this.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse>specializeCallback(
               done));
           return;
-        case 51:
+        case 53:
           this.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse>specializeCallback(
               done));
           return;
-        case 52:
+        case 54:
           this.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse>specializeCallback(
               done));
           return;
-        case 53:
+        case 55:
           this.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse>specializeCallback(
               done));
           return;
-        case 54:
+        case 56:
           this.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse>specializeCallback(
               done));
@@ -59600,66 +61880,70 @@ public final class MasterProtos {
         case 23:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
         case 24:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.getDefaultInstance();
         case 25:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.getDefaultInstance();
         case 26:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
         case 27:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
         case 28:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
         case 29:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
         case 30:
-          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
         case 31:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
         case 32:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
         case 33:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
         case 34:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
         case 35:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
         case 36:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
         case 37:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
         case 38:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
         case 39:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
         case 40:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
         case 41:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
         case 42:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
         case 43:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
         case 44:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
         case 45:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
         case 46:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
         case 47:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
         case 48:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
         case 49:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
         case 50:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
         case 51:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
         case 52:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
         case 53:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
         case 54:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+        case 55:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+        case 56:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
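
Since the generated service publishes these request prototypes, a dynamic caller never has to hard-code request classes. A hedged sketch of that lookup (the `service` argument is assumed to be any com.google.protobuf.Service backed by MasterService, e.g. a stub; error handling omitted):

import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.Service;

import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;

public class PrototypeLookupSketch {
  static Message emptyRequestFor(Service service, String methodName) {
    Descriptors.MethodDescriptor method =
        MasterProtos.MasterService.getDescriptor().findMethodByName(methodName);
    // getRequestPrototype consults exactly the renumbered switch above.
    return service.getRequestPrototype(method);
  }
}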
@@ -59724,66 +62008,70 @@ public final class MasterProtos {
         case 23:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
         case 24:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance();
         case 25:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance();
         case 26:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
         case 27:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
         case 28:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
         case 29:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
         case 30:
-          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
         case 31:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
         case 32:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
         case 33:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
         case 34:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
         case 35:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
         case 36:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
         case 37:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
         case 38:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
         case 39:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
         case 40:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
         case 41:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
         case 42:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
         case 43:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
         case 44:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
         case 45:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
         case 46:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
         case 47:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
         case 48:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
         case 49:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
         case 50:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
         case 51:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
         case 52:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
         case 53:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
         case 54:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+        case 55:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+        case 56:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
@@ -60166,12 +62454,42 @@ public final class MasterProtos {
             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance()));
       }
 
+      public  void setSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(24),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance()));
+      }
+
+      public  void isSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(25),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance()));
+      }
+
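// Editor's note, sketch only -- not part of the generated file. It shows how
// a caller might drive the new non-blocking stub added above. The request and
// response types come from this patch; the channel/controller plumbing and the
// field names (enabled, synchronous, switch_types, taken from the Master.proto
// change in this patch) are illustrative assumptions, not verbatim code.
static void disableSplitAndMergeAsync(com.google.protobuf.RpcChannel channel,
    com.google.protobuf.RpcController controller) {
  MasterProtos.MasterService.Stub stub =
      MasterProtos.MasterService.newStub(channel);
  MasterProtos.SetSplitOrMergeEnabledRequest request =
      MasterProtos.SetSplitOrMergeEnabledRequest.newBuilder()
          .setEnabled(false)     // turn both switches off
          .setSynchronous(true)  // let an outstanding split finish first
          .addSwitchTypes(MasterProtos.MasterSwitchType.SPLIT)
          .addSwitchTypes(MasterProtos.MasterSwitchType.MERGE)
          .build();
  stub.setSplitOrMergeEnabled(controller, request,
      new com.google.protobuf.RpcCallback<MasterProtos.SetSplitOrMergeEnabledResponse>() {
        @Override
        public void run(MasterProtos.SetSplitOrMergeEnabledResponse response) {
          // the response carries the previous value of each requested switch
        }
      });
}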
       public  void normalize(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(24),
+          getDescriptor().getMethods().get(26),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(),
@@ -60186,7 +62504,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(25),
+          getDescriptor().getMethods().get(27),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(),
@@ -60201,7 +62519,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(26),
+          getDescriptor().getMethods().get(28),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance(),
@@ -60216,7 +62534,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(27),
+          getDescriptor().getMethods().get(29),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(),
@@ -60231,7 +62549,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(28),
+          getDescriptor().getMethods().get(30),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(),
@@ -60246,7 +62564,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(29),
+          getDescriptor().getMethods().get(31),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(),
@@ -60261,7 +62579,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(30),
+          getDescriptor().getMethods().get(32),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
@@ -60276,7 +62594,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(31),
+          getDescriptor().getMethods().get(33),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(),
@@ -60291,7 +62609,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(32),
+          getDescriptor().getMethods().get(34),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(),
@@ -60306,7 +62624,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(33),
+          getDescriptor().getMethods().get(35),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(),
@@ -60321,7 +62639,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(34),
+          getDescriptor().getMethods().get(36),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(),
@@ -60336,7 +62654,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(35),
+          getDescriptor().getMethods().get(37),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(),
@@ -60351,7 +62669,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(36),
+          getDescriptor().getMethods().get(38),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(),
@@ -60366,7 +62684,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(37),
+          getDescriptor().getMethods().get(39),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(),
@@ -60381,7 +62699,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(38),
+          getDescriptor().getMethods().get(40),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(),
@@ -60396,7 +62714,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(39),
+          getDescriptor().getMethods().get(41),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(),
@@ -60411,7 +62729,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(40),
+          getDescriptor().getMethods().get(42),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(),
@@ -60426,7 +62744,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(41),
+          getDescriptor().getMethods().get(43),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(),
@@ -60441,7 +62759,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(42),
+          getDescriptor().getMethods().get(44),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(),
@@ -60456,7 +62774,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(43),
+          getDescriptor().getMethods().get(45),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(),
@@ -60471,7 +62789,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(44),
+          getDescriptor().getMethods().get(46),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(),
@@ -60486,7 +62804,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(45),
+          getDescriptor().getMethods().get(47),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(),
@@ -60501,7 +62819,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(46),
+          getDescriptor().getMethods().get(48),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(),
@@ -60516,7 +62834,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(47),
+          getDescriptor().getMethods().get(49),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(),
@@ -60531,7 +62849,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(48),
+          getDescriptor().getMethods().get(50),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(),
@@ -60546,7 +62864,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(49),
+          getDescriptor().getMethods().get(51),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(),
@@ -60561,7 +62879,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(50),
+          getDescriptor().getMethods().get(52),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(),
@@ -60576,7 +62894,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(51),
+          getDescriptor().getMethods().get(53),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(),
@@ -60591,7 +62909,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(52),
+          getDescriptor().getMethods().get(54),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(),
@@ -60606,7 +62924,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(53),
+          getDescriptor().getMethods().get(55),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(),
@@ -60621,7 +62939,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(54),
+          getDescriptor().getMethods().get(56),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(),
@@ -60758,6 +63076,16 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request)
           throws com.google.protobuf.ServiceException;
 
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse setSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest request)
+          throws com.google.protobuf.ServiceException;
+
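// Editor's note, sketch only -- not part of the generated file. It mirrors how
// HBaseAdmin elsewhere in this patch queries the switch state through the
// BlockingInterface declared above; passing a null controller and the
// switch_type/enabled field names (from the Master.proto change in this patch)
// are assumptions of this sketch.
static boolean isSplitEnabled(MasterProtos.MasterService.BlockingInterface master)
    throws com.google.protobuf.ServiceException {
  MasterProtos.IsSplitOrMergeEnabledRequest request =
      MasterProtos.IsSplitOrMergeEnabledRequest.newBuilder()
          .setSwitchType(MasterProtos.MasterSwitchType.SPLIT)
          .build();
  // the response's single required field reports the current switch state
  return master.isSplitOrMergeEnabled(null, request).getEnabled();
}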
       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse normalize(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request)
@@ -61209,12 +63537,36 @@ public final class MasterProtos {
       }
 
 
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse setSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(24),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(25),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.getDefaultInstance());
+      }
+
+
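// Editor's note (not generated output): BlockingStub routes the two new calls
// through channel.callBlockingMethod at the same descriptor indices (24 and
// 25) as the async stub above, which is why every pre-existing blocking method
// below is renumbered by +2 rather than changed in any other way.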
       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse normalize(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(24),
+          getDescriptor().getMethods().get(26),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance());
@@ -61226,7 +63578,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(25),
+          getDescriptor().getMethods().get(27),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance());
@@ -61238,7 +63590,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(26),
+          getDescriptor().getMethods().get(28),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance());
@@ -61250,7 +63602,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(27),
+          getDescriptor().getMethods().get(29),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance());
@@ -61262,7 +63614,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(28),
+          getDescriptor().getMethods().get(30),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance());
@@ -61274,7 +63626,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(29),
+          getDescriptor().getMethods().get(31),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance());
@@ -61286,7 +63638,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(30),
+          getDescriptor().getMethods().get(32),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
@@ -61298,7 +63650,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(31),
+          getDescriptor().getMethods().get(33),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance());
@@ -61310,7 +63662,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(32),
+          getDescriptor().getMethods().get(34),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance());
@@ -61322,7 +63674,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(33),
+          getDescriptor().getMethods().get(35),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance());
@@ -61334,7 +63686,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(34),
+          getDescriptor().getMethods().get(36),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance());
@@ -61346,7 +63698,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(35),
+          getDescriptor().getMethods().get(37),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance());
@@ -61358,7 +63710,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(36),
+          getDescriptor().getMethods().get(38),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance());
@@ -61370,7 +63722,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(37),
+          getDescriptor().getMethods().get(39),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance());
@@ -61382,7 +63734,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(38),
+          getDescriptor().getMethods().get(40),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance());
@@ -61394,7 +63746,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(39),
+          getDescriptor().getMethods().get(41),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance());
@@ -61406,7 +63758,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(40),
+          getDescriptor().getMethods().get(42),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance());
@@ -61418,7 +63770,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(41),
+          getDescriptor().getMethods().get(43),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance());
@@ -61430,7 +63782,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(42),
+          getDescriptor().getMethods().get(44),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance());
@@ -61442,7 +63794,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(43),
+          getDescriptor().getMethods().get(45),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance());
@@ -61454,7 +63806,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(44),
+          getDescriptor().getMethods().get(46),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance());
@@ -61466,7 +63818,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(45),
+          getDescriptor().getMethods().get(47),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance());
@@ -61478,7 +63830,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(46),
+          getDescriptor().getMethods().get(48),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance());
@@ -61490,7 +63842,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(47),
+          getDescriptor().getMethods().get(49),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance());
@@ -61502,7 +63854,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(48),
+          getDescriptor().getMethods().get(50),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance());
@@ -61514,7 +63866,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(49),
+          getDescriptor().getMethods().get(51),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance());
@@ -61526,7 +63878,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(50),
+          getDescriptor().getMethods().get(52),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance());
@@ -61538,7 +63890,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(51),
+          getDescriptor().getMethods().get(53),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance());
@@ -61550,7 +63902,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(52),
+          getDescriptor().getMethods().get(54),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance());
@@ -61562,7 +63914,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(53),
+          getDescriptor().getMethods().get(55),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance());
@@ -61574,7 +63926,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(54),
+          getDescriptor().getMethods().get(56),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance());
@@ -61846,6 +64198,26 @@ public final class MasterProtos {
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
     internal_static_hbase_pb_NormalizeRequest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -62211,225 +64583,239 @@ public final class MasterProtos {
       "ncerRunningResponse\022\032\n\022prev_balance_valu",
       "e\030\001 \001(\010\"\032\n\030IsBalancerEnabledRequest\",\n\031I" +
       "sBalancerEnabledResponse\022\017\n\007enabled\030\001 \002(" +
-      "\010\"\022\n\020NormalizeRequest\"+\n\021NormalizeRespon" +
-      "se\022\026\n\016normalizer_ran\030\001 \002(\010\")\n\033SetNormali" +
-      "zerRunningRequest\022\n\n\002on\030\001 \002(\010\"=\n\034SetNorm" +
-      "alizerRunningResponse\022\035\n\025prev_normalizer" +
-      "_value\030\001 \001(\010\"\034\n\032IsNormalizerEnabledReque" +
-      "st\".\n\033IsNormalizerEnabledResponse\022\017\n\007ena" +
-      "bled\030\001 \002(\010\"\027\n\025RunCatalogScanRequest\"-\n\026R" +
-      "unCatalogScanResponse\022\023\n\013scan_result\030\001 \001",
-      "(\005\"-\n\033EnableCatalogJanitorRequest\022\016\n\006ena" +
-      "ble\030\001 \002(\010\"2\n\034EnableCatalogJanitorRespons" +
-      "e\022\022\n\nprev_value\030\001 \001(\010\" \n\036IsCatalogJanito" +
-      "rEnabledRequest\"0\n\037IsCatalogJanitorEnabl" +
-      "edResponse\022\r\n\005value\030\001 \002(\010\"B\n\017SnapshotReq" +
-      "uest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.Snapsh" +
-      "otDescription\",\n\020SnapshotResponse\022\030\n\020exp" +
-      "ected_timeout\030\001 \002(\003\"\036\n\034GetCompletedSnaps" +
-      "hotsRequest\"Q\n\035GetCompletedSnapshotsResp" +
-      "onse\0220\n\tsnapshots\030\001 \003(\0132\035.hbase.pb.Snaps",
-      "hotDescription\"H\n\025DeleteSnapshotRequest\022" +
-      "/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotDes" +
-      "cription\"\030\n\026DeleteSnapshotResponse\"I\n\026Re" +
-      "storeSnapshotRequest\022/\n\010snapshot\030\001 \002(\0132\035" +
-      ".hbase.pb.SnapshotDescription\"\031\n\027Restore" +
-      "SnapshotResponse\"H\n\025IsSnapshotDoneReques" +
-      "t\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb.SnapshotD" +
-      "escription\"^\n\026IsSnapshotDoneResponse\022\023\n\004" +
-      "done\030\001 \001(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hb" +
-      "ase.pb.SnapshotDescription\"O\n\034IsRestoreS",
-      "napshotDoneRequest\022/\n\010snapshot\030\001 \001(\0132\035.h" +
-      "base.pb.SnapshotDescription\"4\n\035IsRestore" +
-      "SnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fals" +
-      "e\"F\n\033GetSchemaAlterStatusRequest\022\'\n\ntabl" +
-      "e_name\030\001 \002(\0132\023.hbase.pb.TableName\"T\n\034Get" +
-      "SchemaAlterStatusResponse\022\035\n\025yet_to_upda" +
-      "te_regions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"" +
-      "\213\001\n\032GetTableDescriptorsRequest\022(\n\013table_" +
-      "names\030\001 \003(\0132\023.hbase.pb.TableName\022\r\n\005rege" +
-      "x\030\002 \001(\t\022!\n\022include_sys_tables\030\003 \001(\010:\005fal",
-      "se\022\021\n\tnamespace\030\004 \001(\t\"J\n\033GetTableDescrip" +
-      "torsResponse\022+\n\014table_schema\030\001 \003(\0132\025.hba" +
-      "se.pb.TableSchema\"[\n\024GetTableNamesReques" +
-      "t\022\r\n\005regex\030\001 \001(\t\022!\n\022include_sys_tables\030\002" +
-      " \001(\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTab" +
-      "leNamesResponse\022(\n\013table_names\030\001 \003(\0132\023.h" +
-      "base.pb.TableName\"?\n\024GetTableStateReques" +
-      "t\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableNa" +
-      "me\"B\n\025GetTableStateResponse\022)\n\013table_sta" +
-      "te\030\001 \002(\0132\024.hbase.pb.TableState\"\031\n\027GetClu",
-      "sterStatusRequest\"K\n\030GetClusterStatusRes" +
-      "ponse\022/\n\016cluster_status\030\001 \002(\0132\027.hbase.pb" +
-      ".ClusterStatus\"\030\n\026IsMasterRunningRequest" +
-      "\"4\n\027IsMasterRunningResponse\022\031\n\021is_master" +
-      "_running\030\001 \002(\010\"I\n\024ExecProcedureRequest\0221" +
-      "\n\tprocedure\030\001 \002(\0132\036.hbase.pb.ProcedureDe" +
-      "scription\"F\n\025ExecProcedureResponse\022\030\n\020ex" +
-      "pected_timeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(" +
-      "\014\"K\n\026IsProcedureDoneRequest\0221\n\tprocedure" +
-      "\030\001 \001(\0132\036.hbase.pb.ProcedureDescription\"`",
-      "\n\027IsProcedureDoneResponse\022\023\n\004done\030\001 \001(\010:" +
-      "\005false\0220\n\010snapshot\030\002 \001(\0132\036.hbase.pb.Proc" +
-      "edureDescription\",\n\031GetProcedureResultRe" +
-      "quest\022\017\n\007proc_id\030\001 \002(\004\"\371\001\n\032GetProcedureR" +
-      "esultResponse\0229\n\005state\030\001 \002(\0162*.hbase.pb." +
-      "GetProcedureResultResponse.State\022\022\n\nstar" +
-      "t_time\030\002 \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006res" +
-      "ult\030\004 \001(\014\0224\n\texception\030\005 \001(\0132!.hbase.pb." +
-      "ForeignExceptionMessage\"1\n\005State\022\r\n\tNOT_" +
-      "FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025Ab",
-      "ortProcedureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025" +
-      "mayInterruptIfRunning\030\002 \001(\010:\004true\"6\n\026Abo" +
-      "rtProcedureResponse\022\034\n\024is_procedure_abor" +
-      "ted\030\001 \002(\010\"\027\n\025ListProceduresRequest\"@\n\026Li" +
-      "stProceduresResponse\022&\n\tprocedure\030\001 \003(\0132" +
-      "\023.hbase.pb.Procedure\"\315\001\n\017SetQuotaRequest" +
-      "\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022" +
-      "\021\n\tnamespace\030\003 \001(\t\022\'\n\ntable_name\030\004 \001(\0132\023" +
-      ".hbase.pb.TableName\022\022\n\nremove_all\030\005 \001(\010\022" +
-      "\026\n\016bypass_globals\030\006 \001(\010\022+\n\010throttle\030\007 \001(",
-      "\0132\031.hbase.pb.ThrottleRequest\"\022\n\020SetQuota" +
-      "Response\"J\n\037MajorCompactionTimestampRequ" +
-      "est\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Table" +
-      "Name\"U\n(MajorCompactionTimestampForRegio" +
-      "nRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.Regi" +
-      "onSpecifier\"@\n MajorCompactionTimestampR" +
-      "esponse\022\034\n\024compaction_timestamp\030\001 \002(\003\"\035\n" +
-      "\033SecurityCapabilitiesRequest\"\354\001\n\034Securit" +
-      "yCapabilitiesResponse\022G\n\014capabilities\030\001 " +
-      "\003(\01621.hbase.pb.SecurityCapabilitiesRespo",
-      "nse.Capability\"\202\001\n\nCapability\022\031\n\025SIMPLE_" +
-      "AUTHENTICATION\020\000\022\031\n\025SECURE_AUTHENTICATIO" +
-      "N\020\001\022\021\n\rAUTHORIZATION\020\002\022\026\n\022CELL_AUTHORIZA" +
-      "TION\020\003\022\023\n\017CELL_VISIBILITY\020\0042\374&\n\rMasterSe" +
-      "rvice\022e\n\024GetSchemaAlterStatus\022%.hbase.pb" +
-      ".GetSchemaAlterStatusRequest\032&.hbase.pb." +
-      "GetSchemaAlterStatusResponse\022b\n\023GetTable" +
-      "Descriptors\022$.hbase.pb.GetTableDescripto" +
-      "rsRequest\032%.hbase.pb.GetTableDescriptors" +
-      "Response\022P\n\rGetTableNames\022\036.hbase.pb.Get",
-      "TableNamesRequest\032\037.hbase.pb.GetTableNam" +
-      "esResponse\022Y\n\020GetClusterStatus\022!.hbase.p" +
-      "b.GetClusterStatusRequest\032\".hbase.pb.Get" +
-      "ClusterStatusResponse\022V\n\017IsMasterRunning" +
-      "\022 .hbase.pb.IsMasterRunningRequest\032!.hba" +
-      "se.pb.IsMasterRunningResponse\022D\n\tAddColu" +
-      "mn\022\032.hbase.pb.AddColumnRequest\032\033.hbase.p" +
-      "b.AddColumnResponse\022M\n\014DeleteColumn\022\035.hb" +
-      "ase.pb.DeleteColumnRequest\032\036.hbase.pb.De" +
-      "leteColumnResponse\022M\n\014ModifyColumn\022\035.hba",
-      "se.pb.ModifyColumnRequest\032\036.hbase.pb.Mod" +
-      "ifyColumnResponse\022G\n\nMoveRegion\022\033.hbase." +
-      "pb.MoveRegionRequest\032\034.hbase.pb.MoveRegi" +
-      "onResponse\022k\n\026DispatchMergingRegions\022\'.h" +
-      "base.pb.DispatchMergingRegionsRequest\032(." +
-      "hbase.pb.DispatchMergingRegionsResponse\022" +
-      "M\n\014AssignRegion\022\035.hbase.pb.AssignRegionR" +
-      "equest\032\036.hbase.pb.AssignRegionResponse\022S" +
-      "\n\016UnassignRegion\022\037.hbase.pb.UnassignRegi" +
-      "onRequest\032 .hbase.pb.UnassignRegionRespo",
-      "nse\022P\n\rOfflineRegion\022\036.hbase.pb.OfflineR" +
-      "egionRequest\032\037.hbase.pb.OfflineRegionRes" +
-      "ponse\022J\n\013DeleteTable\022\034.hbase.pb.DeleteTa" +
-      "bleRequest\032\035.hbase.pb.DeleteTableRespons" +
-      "e\022P\n\rtruncateTable\022\036.hbase.pb.TruncateTa" +
-      "bleRequest\032\037.hbase.pb.TruncateTableRespo" +
-      "nse\022J\n\013EnableTable\022\034.hbase.pb.EnableTabl" +
-      "eRequest\032\035.hbase.pb.EnableTableResponse\022" +
-      "M\n\014DisableTable\022\035.hbase.pb.DisableTableR" +
-      "equest\032\036.hbase.pb.DisableTableResponse\022J",
-      "\n\013ModifyTable\022\034.hbase.pb.ModifyTableRequ" +
-      "est\032\035.hbase.pb.ModifyTableResponse\022J\n\013Cr" +
-      "eateTable\022\034.hbase.pb.CreateTableRequest\032" +
-      "\035.hbase.pb.CreateTableResponse\022A\n\010Shutdo" +
-      "wn\022\031.hbase.pb.ShutdownRequest\032\032.hbase.pb" +
-      ".ShutdownResponse\022G\n\nStopMaster\022\033.hbase." +
-      "pb.StopMasterRequest\032\034.hbase.pb.StopMast" +
-      "erResponse\022>\n\007Balance\022\030.hbase.pb.Balance" +
-      "Request\032\031.hbase.pb.BalanceResponse\022_\n\022Se" +
-      "tBalancerRunning\022#.hbase.pb.SetBalancerR",
-      "unningRequest\032$.hbase.pb.SetBalancerRunn" +
-      "ingResponse\022\\\n\021IsBalancerEnabled\022\".hbase" +
-      ".pb.IsBalancerEnabledRequest\032#.hbase.pb." +
-      "IsBalancerEnabledResponse\022D\n\tNormalize\022\032" +
-      ".hbase.pb.NormalizeRequest\032\033.hbase.pb.No" +
-      "rmalizeResponse\022e\n\024SetNormalizerRunning\022" +
-      "%.hbase.pb.SetNormalizerRunningRequest\032&" +
-      ".hbase.pb.SetNormalizerRunningResponse\022b" +
-      "\n\023IsNormalizerEnabled\022$.hbase.pb.IsNorma" +
-      "lizerEnabledRequest\032%.hbase.pb.IsNormali",
-      "zerEnabledResponse\022S\n\016RunCatalogScan\022\037.h" +
-      "base.pb.RunCatalogScanRequest\032 .hbase.pb" +
-      ".RunCatalogScanResponse\022e\n\024EnableCatalog" +
-      "Janitor\022%.hbase.pb.EnableCatalogJanitorR" +
-      "equest\032&.hbase.pb.EnableCatalogJanitorRe" +
-      "sponse\022n\n\027IsCatalogJanitorEnabled\022(.hbas" +
-      "e.pb.IsCatalogJanitorEnabledRequest\032).hb" +
-      "ase.pb.IsCatalogJanitorEnabledResponse\022^" +
-      "\n\021ExecMasterService\022#.hbase.pb.Coprocess" +
-      "orServiceRequest\032$.hbase.pb.CoprocessorS",
-      "erviceResponse\022A\n\010Snapshot\022\031.hbase.pb.Sn" +
-      "apshotRequest\032\032.hbase.pb.SnapshotRespons" +
-      "e\022h\n\025GetCompletedSnapshots\022&.hbase.pb.Ge" +
-      "tCompletedSnapshotsRequest\032\'.hbase.pb.Ge" +
-      "tCompletedSnapshotsResponse\022S\n\016DeleteSna" +
-      "pshot\022\037.hbase.pb.DeleteSnapshotRequest\032 " +
-      ".hbase.pb.DeleteSnapshotResponse\022S\n\016IsSn" +
-      "apshotDone\022\037.hbase.pb.IsSnapshotDoneRequ" +
-      "est\032 .hbase.pb.IsSnapshotDoneResponse\022V\n" +
-      "\017RestoreSnapshot\022 .hbase.pb.RestoreSnaps",
-      "hotRequest\032!.hbase.pb.RestoreSnapshotRes" +
-      "ponse\022h\n\025IsRestoreSnapshotDone\022&.hbase.p" +
-      "b.IsRestoreSnapshotDoneRequest\032\'.hbase.p" +
-      "b.IsRestoreSnapshotDoneResponse\022P\n\rExecP" +
-      "rocedure\022\036.hbase.pb.ExecProcedureRequest" +
-      "\032\037.hbase.pb.ExecProcedureResponse\022W\n\024Exe" +
-      "cProcedureWithRet\022\036.hbase.pb.ExecProcedu" +
-      "reRequest\032\037.hbase.pb.ExecProcedureRespon" +
-      "se\022V\n\017IsProcedureDone\022 .hbase.pb.IsProce" +
-      "dureDoneRequest\032!.hbase.pb.IsProcedureDo",
-      "neResponse\022V\n\017ModifyNamespace\022 .hbase.pb" +
-      ".ModifyNamespaceRequest\032!.hbase.pb.Modif" +
-      "yNamespaceResponse\022V\n\017CreateNamespace\022 ." +
-      "hbase.pb.CreateNamespaceRequest\032!.hbase." +
-      "pb.CreateNamespaceResponse\022V\n\017DeleteName" +
-      "space\022 .hbase.pb.DeleteNamespaceRequest\032" +
-      "!.hbase.pb.DeleteNamespaceResponse\022k\n\026Ge" +
-      "tNamespaceDescriptor\022\'.hbase.pb.GetNames" +
-      "paceDescriptorRequest\032(.hbase.pb.GetName" +
-      "spaceDescriptorResponse\022q\n\030ListNamespace",
-      "Descriptors\022).hbase.pb.ListNamespaceDesc" +
-      "riptorsRequest\032*.hbase.pb.ListNamespaceD" +
-      "escriptorsResponse\022\206\001\n\037ListTableDescript" +
-      "orsByNamespace\0220.hbase.pb.ListTableDescr" +
-      "iptorsByNamespaceRequest\0321.hbase.pb.List" +
-      "TableDescriptorsByNamespaceResponse\022t\n\031L" +
-      "istTableNamesByNamespace\022*.hbase.pb.List" +
-      "TableNamesByNamespaceRequest\032+.hbase.pb." +
-      "ListTableNamesByNamespaceResponse\022P\n\rGet" +
-      "TableState\022\036.hbase.pb.GetTableStateReque",
-      "st\032\037.hbase.pb.GetTableStateResponse\022A\n\010S" +
-      "etQuota\022\031.hbase.pb.SetQuotaRequest\032\032.hba" +
-      "se.pb.SetQuotaResponse\022x\n\037getLastMajorCo" +
-      "mpactionTimestamp\022).hbase.pb.MajorCompac" +
-      "tionTimestampRequest\032*.hbase.pb.MajorCom" +
-      "pactionTimestampResponse\022\212\001\n(getLastMajo" +
-      "rCompactionTimestampForRegion\0222.hbase.pb" +
-      ".MajorCompactionTimestampForRegionReques" +
-      "t\032*.hbase.pb.MajorCompactionTimestampRes" +
-      "ponse\022_\n\022getProcedureResult\022#.hbase.pb.G",
-      "etProcedureResultRequest\032$.hbase.pb.GetP" +
-      "rocedureResultResponse\022h\n\027getSecurityCap" +
-      "abilities\022%.hbase.pb.SecurityCapabilitie" +
-      "sRequest\032&.hbase.pb.SecurityCapabilities" +
-      "Response\022S\n\016AbortProcedure\022\037.hbase.pb.Ab" +
-      "ortProcedureRequest\032 .hbase.pb.AbortProc" +
-      "edureResponse\022S\n\016ListProcedures\022\037.hbase." +
-      "pb.ListProceduresRequest\032 .hbase.pb.List" +
-      "ProceduresResponseBB\n*org.apache.hadoop." +
-      "hbase.protobuf.generatedB\014MasterProtosH\001",
-      "\210\001\001\240\001\001"
+      "\010\"w\n\035SetSplitOrMergeEnabledRequest\022\017\n\007en" +
+      "abled\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\0220\n\014swit" +
+      "ch_types\030\003 \003(\0162\032.hbase.pb.MasterSwitchTy" +
+      "pe\"4\n\036SetSplitOrMergeEnabledResponse\022\022\n\n" +
+      "prev_value\030\001 \003(\010\"O\n\034IsSplitOrMergeEnable" +
+      "dRequest\022/\n\013switch_type\030\001 \002(\0162\032.hbase.pb" +
+      ".MasterSwitchType\"0\n\035IsSplitOrMergeEnabl" +
+      "edResponse\022\017\n\007enabled\030\001 \002(\010\"\022\n\020Normalize",
+      "Request\"+\n\021NormalizeResponse\022\026\n\016normaliz" +
+      "er_ran\030\001 \002(\010\")\n\033SetNormalizerRunningRequ" +
+      "est\022\n\n\002on\030\001 \002(\010\"=\n\034SetNormalizerRunningR" +
+      "esponse\022\035\n\025prev_normalizer_value\030\001 \001(\010\"\034" +
+      "\n\032IsNormalizerEnabledRequest\".\n\033IsNormal" +
+      "izerEnabledResponse\022\017\n\007enabled\030\001 \002(\010\"\027\n\025" +
+      "RunCatalogScanRequest\"-\n\026RunCatalogScanR" +
+      "esponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033EnableCa" +
+      "talogJanitorRequest\022\016\n\006enable\030\001 \002(\010\"2\n\034E" +
+      "nableCatalogJanitorResponse\022\022\n\nprev_valu",
+      "e\030\001 \001(\010\" \n\036IsCatalogJanitorEnabledReques" +
+      "t\"0\n\037IsCatalogJanitorEnabledResponse\022\r\n\005" +
+      "value\030\001 \002(\010\"B\n\017SnapshotRequest\022/\n\010snapsh" +
+      "ot\030\001 \002(\0132\035.hbase.pb.SnapshotDescription\"" +
+      ",\n\020SnapshotResponse\022\030\n\020expected_timeout\030" +
+      "\001 \002(\003\"\036\n\034GetCompletedSnapshotsRequest\"Q\n" +
+      "\035GetCompletedSnapshotsResponse\0220\n\tsnapsh" +
+      "ots\030\001 \003(\0132\035.hbase.pb.SnapshotDescription" +
+      "\"H\n\025DeleteSnapshotRequest\022/\n\010snapshot\030\001 " +
+      "\002(\0132\035.hbase.pb.SnapshotDescription\"\030\n\026De",
+      "leteSnapshotResponse\"I\n\026RestoreSnapshotR" +
+      "equest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.Snap" +
+      "shotDescription\"\031\n\027RestoreSnapshotRespon" +
+      "se\"H\n\025IsSnapshotDoneRequest\022/\n\010snapshot\030" +
+      "\001 \001(\0132\035.hbase.pb.SnapshotDescription\"^\n\026" +
+      "IsSnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fa" +
+      "lse\022/\n\010snapshot\030\002 \001(\0132\035.hbase.pb.Snapsho" +
+      "tDescription\"O\n\034IsRestoreSnapshotDoneReq" +
+      "uest\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb.Snapsh" +
+      "otDescription\"4\n\035IsRestoreSnapshotDoneRe",
+      "sponse\022\023\n\004done\030\001 \001(\010:\005false\"F\n\033GetSchema" +
+      "AlterStatusRequest\022\'\n\ntable_name\030\001 \002(\0132\023" +
+      ".hbase.pb.TableName\"T\n\034GetSchemaAlterSta" +
+      "tusResponse\022\035\n\025yet_to_update_regions\030\001 \001" +
+      "(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\213\001\n\032GetTableDe" +
+      "scriptorsRequest\022(\n\013table_names\030\001 \003(\0132\023." +
+      "hbase.pb.TableName\022\r\n\005regex\030\002 \001(\t\022!\n\022inc" +
+      "lude_sys_tables\030\003 \001(\010:\005false\022\021\n\tnamespac" +
+      "e\030\004 \001(\t\"J\n\033GetTableDescriptorsResponse\022+" +
+      "\n\014table_schema\030\001 \003(\0132\025.hbase.pb.TableSch",
+      "ema\"[\n\024GetTableNamesRequest\022\r\n\005regex\030\001 \001" +
+      "(\t\022!\n\022include_sys_tables\030\002 \001(\010:\005false\022\021\n" +
+      "\tnamespace\030\003 \001(\t\"A\n\025GetTableNamesRespons" +
+      "e\022(\n\013table_names\030\001 \003(\0132\023.hbase.pb.TableN" +
+      "ame\"?\n\024GetTableStateRequest\022\'\n\ntable_nam" +
+      "e\030\001 \002(\0132\023.hbase.pb.TableName\"B\n\025GetTable" +
+      "StateResponse\022)\n\013table_state\030\001 \002(\0132\024.hba" +
+      "se.pb.TableState\"\031\n\027GetClusterStatusRequ" +
+      "est\"K\n\030GetClusterStatusResponse\022/\n\016clust" +
+      "er_status\030\001 \002(\0132\027.hbase.pb.ClusterStatus",
+      "\"\030\n\026IsMasterRunningRequest\"4\n\027IsMasterRu" +
+      "nningResponse\022\031\n\021is_master_running\030\001 \002(\010" +
+      "\"I\n\024ExecProcedureRequest\0221\n\tprocedure\030\001 " +
+      "\002(\0132\036.hbase.pb.ProcedureDescription\"F\n\025E" +
+      "xecProcedureResponse\022\030\n\020expected_timeout" +
+      "\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026IsProcedu" +
+      "reDoneRequest\0221\n\tprocedure\030\001 \001(\0132\036.hbase" +
+      ".pb.ProcedureDescription\"`\n\027IsProcedureD" +
+      "oneResponse\022\023\n\004done\030\001 \001(\010:\005false\0220\n\010snap" +
+      "shot\030\002 \001(\0132\036.hbase.pb.ProcedureDescripti",
+      "on\",\n\031GetProcedureResultRequest\022\017\n\007proc_" +
+      "id\030\001 \002(\004\"\371\001\n\032GetProcedureResultResponse\022" +
+      "9\n\005state\030\001 \002(\0162*.hbase.pb.GetProcedureRe" +
+      "sultResponse.State\022\022\n\nstart_time\030\002 \001(\004\022\023" +
+      "\n\013last_update\030\003 \001(\004\022\016\n\006result\030\004 \001(\014\0224\n\te" +
+      "xception\030\005 \001(\0132!.hbase.pb.ForeignExcepti" +
+      "onMessage\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022\013\n\007RUN" +
+      "NING\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProcedureRe" +
+      "quest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInterruptIf" +
+      "Running\030\002 \001(\010:\004true\"6\n\026AbortProcedureRes",
+      "ponse\022\034\n\024is_procedure_aborted\030\001 \002(\010\"\027\n\025L" +
+      "istProceduresRequest\"@\n\026ListProceduresRe" +
+      "sponse\022&\n\tprocedure\030\001 \003(\0132\023.hbase.pb.Pro" +
+      "cedure\"\315\001\n\017SetQuotaRequest\022\021\n\tuser_name\030" +
+      "\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnamespace\030\003" +
+      " \001(\t\022\'\n\ntable_name\030\004 \001(\0132\023.hbase.pb.Tabl" +
+      "eName\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypass_glob" +
+      "als\030\006 \001(\010\022+\n\010throttle\030\007 \001(\0132\031.hbase.pb.T" +
+      "hrottleRequest\"\022\n\020SetQuotaResponse\"J\n\037Ma" +
+      "jorCompactionTimestampRequest\022\'\n\ntable_n",
+      "ame\030\001 \002(\0132\023.hbase.pb.TableName\"U\n(MajorC" +
+      "ompactionTimestampForRegionRequest\022)\n\006re" +
+      "gion\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"@\n" +
+      " MajorCompactionTimestampResponse\022\034\n\024com" +
+      "paction_timestamp\030\001 \002(\003\"\035\n\033SecurityCapab" +
+      "ilitiesRequest\"\354\001\n\034SecurityCapabilitiesR" +
+      "esponse\022G\n\014capabilities\030\001 \003(\01621.hbase.pb" +
+      ".SecurityCapabilitiesResponse.Capability" +
+      "\"\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTICATION" +
+      "\020\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHORI",
+      "ZATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CELL" +
+      "_VISIBILITY\020\004*(\n\020MasterSwitchType\022\t\n\005SPL" +
+      "IT\020\000\022\t\n\005MERGE\020\0012\323(\n\rMasterService\022e\n\024Get" +
+      "SchemaAlterStatus\022%.hbase.pb.GetSchemaAl" +
+      "terStatusRequest\032&.hbase.pb.GetSchemaAlt" +
+      "erStatusResponse\022b\n\023GetTableDescriptors\022" +
+      "$.hbase.pb.GetTableDescriptorsRequest\032%." +
+      "hbase.pb.GetTableDescriptorsResponse\022P\n\r" +
+      "GetTableNames\022\036.hbase.pb.GetTableNamesRe" +
+      "quest\032\037.hbase.pb.GetTableNamesResponse\022Y",
+      "\n\020GetClusterStatus\022!.hbase.pb.GetCluster" +
+      "StatusRequest\032\".hbase.pb.GetClusterStatu" +
+      "sResponse\022V\n\017IsMasterRunning\022 .hbase.pb." +
+      "IsMasterRunningRequest\032!.hbase.pb.IsMast" +
+      "erRunningResponse\022D\n\tAddColumn\022\032.hbase.p" +
+      "b.AddColumnRequest\032\033.hbase.pb.AddColumnR" +
+      "esponse\022M\n\014DeleteColumn\022\035.hbase.pb.Delet" +
+      "eColumnRequest\032\036.hbase.pb.DeleteColumnRe" +
+      "sponse\022M\n\014ModifyColumn\022\035.hbase.pb.Modify" +
+      "ColumnRequest\032\036.hbase.pb.ModifyColumnRes",
+      "ponse\022G\n\nMoveRegion\022\033.hbase.pb.MoveRegio" +
+      "nRequest\032\034.hbase.pb.MoveRegionResponse\022k" +
+      "\n\026DispatchMergingRegions\022\'.hbase.pb.Disp" +
+      "atchMergingRegionsRequest\032(.hbase.pb.Dis" +
+      "patchMergingRegionsResponse\022M\n\014AssignReg" +
+      "ion\022\035.hbase.pb.AssignRegionRequest\032\036.hba" +
+      "se.pb.AssignRegionResponse\022S\n\016UnassignRe" +
+      "gion\022\037.hbase.pb.UnassignRegionRequest\032 ." +
+      "hbase.pb.UnassignRegionResponse\022P\n\rOffli" +
+      "neRegion\022\036.hbase.pb.OfflineRegionRequest",
+      "\032\037.hbase.pb.OfflineRegionResponse\022J\n\013Del" +
+      "eteTable\022\034.hbase.pb.DeleteTableRequest\032\035" +
+      ".hbase.pb.DeleteTableResponse\022P\n\rtruncat" +
+      "eTable\022\036.hbase.pb.TruncateTableRequest\032\037" +
+      ".hbase.pb.TruncateTableResponse\022J\n\013Enabl" +
+      "eTable\022\034.hbase.pb.EnableTableRequest\032\035.h" +
+      "base.pb.EnableTableResponse\022M\n\014DisableTa" +
+      "ble\022\035.hbase.pb.DisableTableRequest\032\036.hba" +
+      "se.pb.DisableTableResponse\022J\n\013ModifyTabl" +
+      "e\022\034.hbase.pb.ModifyTableRequest\032\035.hbase.",
+      "pb.ModifyTableResponse\022J\n\013CreateTable\022\034." +
+      "hbase.pb.CreateTableRequest\032\035.hbase.pb.C" +
+      "reateTableResponse\022A\n\010Shutdown\022\031.hbase.p" +
+      "b.ShutdownRequest\032\032.hbase.pb.ShutdownRes" +
+      "ponse\022G\n\nStopMaster\022\033.hbase.pb.StopMaste" +
+      "rRequest\032\034.hbase.pb.StopMasterResponse\022>" +
+      "\n\007Balance\022\030.hbase.pb.BalanceRequest\032\031.hb" +
+      "ase.pb.BalanceResponse\022_\n\022SetBalancerRun" +
+      "ning\022#.hbase.pb.SetBalancerRunningReques" +
+      "t\032$.hbase.pb.SetBalancerRunningResponse\022",
+      "\\\n\021IsBalancerEnabled\022\".hbase.pb.IsBalanc" +
+      "erEnabledRequest\032#.hbase.pb.IsBalancerEn" +
+      "abledResponse\022k\n\026SetSplitOrMergeEnabled\022" +
+      "\'.hbase.pb.SetSplitOrMergeEnabledRequest" +
+      "\032(.hbase.pb.SetSplitOrMergeEnabledRespon" +
+      "se\022h\n\025IsSplitOrMergeEnabled\022&.hbase.pb.I" +
+      "sSplitOrMergeEnabledRequest\032\'.hbase.pb.I" +
+      "sSplitOrMergeEnabledResponse\022D\n\tNormaliz" +
+      "e\022\032.hbase.pb.NormalizeRequest\032\033.hbase.pb" +
+      ".NormalizeResponse\022e\n\024SetNormalizerRunni",
+      "ng\022%.hbase.pb.SetNormalizerRunningReques" +
+      "t\032&.hbase.pb.SetNormalizerRunningRespons" +
+      "e\022b\n\023IsNormalizerEnabled\022$.hbase.pb.IsNo" +
+      "rmalizerEnabledRequest\032%.hbase.pb.IsNorm" +
+      "alizerEnabledResponse\022S\n\016RunCatalogScan\022" +
+      "\037.hbase.pb.RunCatalogScanRequest\032 .hbase" +
+      ".pb.RunCatalogScanResponse\022e\n\024EnableCata" +
+      "logJanitor\022%.hbase.pb.EnableCatalogJanit" +
+      "orRequest\032&.hbase.pb.EnableCatalogJanito" +
+      "rResponse\022n\n\027IsCatalogJanitorEnabled\022(.h",
+      "base.pb.IsCatalogJanitorEnabledRequest\032)" +
+      ".hbase.pb.IsCatalogJanitorEnabledRespons" +
+      "e\022^\n\021ExecMasterService\022#.hbase.pb.Coproc" +
+      "essorServiceRequest\032$.hbase.pb.Coprocess" +
+      "orServiceResponse\022A\n\010Snapshot\022\031.hbase.pb" +
+      ".SnapshotRequest\032\032.hbase.pb.SnapshotResp" +
+      "onse\022h\n\025GetCompletedSnapshots\022&.hbase.pb" +
+      ".GetCompletedSnapshotsRequest\032\'.hbase.pb" +
+      ".GetCompletedSnapshotsResponse\022S\n\016Delete" +
+      "Snapshot\022\037.hbase.pb.DeleteSnapshotReques",
+      "t\032 .hbase.pb.DeleteSnapshotResponse\022S\n\016I" +
+      "sSnapshotDone\022\037.hbase.pb.IsSnapshotDoneR" +
+      "equest\032 .hbase.pb.IsSnapshotDoneResponse" +
+      "\022V\n\017RestoreSnapshot\022 .hbase.pb.RestoreSn" +
+      "apshotRequest\032!.hbase.pb.RestoreSnapshot" +
+      "Response\022h\n\025IsRestoreSnapshotDone\022&.hbas" +
+      "e.pb.IsRestoreSnapshotDoneRequest\032\'.hbas" +
+      "e.pb.IsRestoreSnapshotDoneResponse\022P\n\rEx" +
+      "ecProcedure\022\036.hbase.pb.ExecProcedureRequ" +
+      "est\032\037.hbase.pb.ExecProcedureResponse\022W\n\024",
+      "ExecProcedureWithRet\022\036.hbase.pb.ExecProc" +
+      "edureRequest\032\037.hbase.pb.ExecProcedureRes" +
+      "ponse\022V\n\017IsProcedureDone\022 .hbase.pb.IsPr" +
+      "ocedureDoneRequest\032!.hbase.pb.IsProcedur" +
+      "eDoneResponse\022V\n\017ModifyNamespace\022 .hbase" +
+      ".pb.ModifyNamespaceRequest\032!.hbase.pb.Mo" +
+      "difyNamespaceResponse\022V\n\017CreateNamespace" +
+      "\022 .hbase.pb.CreateNamespaceRequest\032!.hba" +
+      "se.pb.CreateNamespaceResponse\022V\n\017DeleteN" +
+      "amespace\022 .hbase.pb.DeleteNamespaceReque",
+      "st\032!.hbase.pb.DeleteNamespaceResponse\022k\n" +
+      "\026GetNamespaceDescriptor\022\'.hbase.pb.GetNa" +
+      "mespaceDescriptorRequest\032(.hbase.pb.GetN" +
+      "amespaceDescriptorResponse\022q\n\030ListNamesp" +
+      "aceDescriptors\022).hbase.pb.ListNamespaceD" +
+      "escriptorsRequest\032*.hbase.pb.ListNamespa" +
+      "ceDescriptorsResponse\022\206\001\n\037ListTableDescr" +
+      "iptorsByNamespace\0220.hbase.pb.ListTableDe" +
+      "scriptorsByNamespaceRequest\0321.hbase.pb.L" +
+      "istTableDescriptorsByNamespaceResponse\022t",
+      "\n\031ListTableNamesByNamespace\022*.hbase.pb.L" +
+      "istTableNamesByNamespaceRequest\032+.hbase." +
+      "pb.ListTableNamesByNamespaceResponse\022P\n\r" +
+      "GetTableState\022\036.hbase.pb.GetTableStateRe" +
+      "quest\032\037.hbase.pb.GetTableStateResponse\022A" +
+      "\n\010SetQuota\022\031.hbase.pb.SetQuotaRequest\032\032." +
+      "hbase.pb.SetQuotaResponse\022x\n\037getLastMajo" +
+      "rCompactionTimestamp\022).hbase.pb.MajorCom" +
+      "pactionTimestampRequest\032*.hbase.pb.Major" +
+      "CompactionTimestampResponse\022\212\001\n(getLastM",
+      "ajorCompactionTimestampForRegion\0222.hbase" +
+      ".pb.MajorCompactionTimestampForRegionReq" +
+      "uest\032*.hbase.pb.MajorCompactionTimestamp" +
+      "Response\022_\n\022getProcedureResult\022#.hbase.p" +
+      "b.GetProcedureResultRequest\032$.hbase.pb.G" +
+      "etProcedureResultResponse\022h\n\027getSecurity" +
+      "Capabilities\022%.hbase.pb.SecurityCapabili" +
+      "tiesRequest\032&.hbase.pb.SecurityCapabilit" +
+      "iesResponse\022S\n\016AbortProcedure\022\037.hbase.pb" +
+      ".AbortProcedureRequest\032 .hbase.pb.AbortP",
+      "rocedureResponse\022S\n\016ListProcedures\022\037.hba" +
+      "se.pb.ListProceduresRequest\032 .hbase.pb.L" +
+      "istProceduresResponseBB\n*org.apache.hado" +
+      "op.hbase.protobuf.generatedB\014MasterProto" +
+      "sH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -62748,320 +65134,344 @@ public final class MasterProtos {
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor,
               new java.lang.String[] { "Enabled", });
-          internal_static_hbase_pb_NormalizeRequest_descriptor =
+          internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor =
             getDescriptor().getMessageTypes().get(52);
+          internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor,
+              new java.lang.String[] { "Enabled", "Synchronous", "SwitchTypes", });
+          internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor =
+            getDescriptor().getMessageTypes().get(53);
+          internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor,
+              new java.lang.String[] { "PrevValue", });
+          internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor =
+            getDescriptor().getMessageTypes().get(54);
+          internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor,
+              new java.lang.String[] { "SwitchType", });
+          internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor =
+            getDescriptor().getMessageTypes().get(55);
+          internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor,
+              new java.lang.String[] { "Enabled", });
+          internal_static_hbase_pb_NormalizeRequest_descriptor =
+            getDescriptor().getMessageTypes().get(56);
           internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_NormalizeRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_NormalizeResponse_descriptor =
-            getDescriptor().getMessageTypes().get(53);
+            getDescriptor().getMessageTypes().get(57);
           internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_NormalizeResponse_descriptor,
               new java.lang.String[] { "NormalizerRan", });
           internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor =
-            getDescriptor().getMessageTypes().get(54);
+            getDescriptor().getMessageTypes().get(58);
           internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor,
               new java.lang.String[] { "On", });
           internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor =
-            getDescriptor().getMessageTypes().get(55);
+            getDescriptor().getMessageTypes().get(59);
           internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor,
               new java.lang.String[] { "PrevNormalizerValue", });
           internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor =
-            getDescriptor().getMessageTypes().get(56);
+            getDescriptor().getMessageTypes().get(60);
           internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor =
-            getDescriptor().getMessageTypes().get(57);
+            getDescriptor().getMessageTypes().get(61);
           internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor,
               new java.lang.String[] { "Enabled", });
           internal_static_hbase_pb_RunCatalogScanRequest_descriptor =
-            getDescriptor().getMessageTypes().get(58);
+            getDescriptor().getMessageTypes().get(62);
           internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RunCatalogScanRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_RunCatalogScanResponse_descriptor =
-            getDescriptor().getMessageTypes().get(59);
+            getDescriptor().getMessageTypes().get(63);
           internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RunCatalogScanResponse_descriptor,
               new java.lang.String[] { "ScanResult", });
           internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor =
-            getDescriptor().getMessageTypes().get(60);
+            getDescriptor().getMessageTypes().get(64);
           internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor,
               new java.lang.String[] { "Enable", });
           internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor =
-            getDescriptor().getMessageTypes().get(61);
+            getDescriptor().getMessageTypes().get(65);
           internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor,
               new java.lang.String[] { "PrevValue", });
           internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor =
-            getDescriptor().getMessageTypes().get(62);
+            getDescriptor().getMessageTypes().get(66);
           internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor =
-            getDescriptor().getMessageTypes().get(63);
+            getDescriptor().getMessageTypes().get(67);
           internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor,
               new java.lang.String[] { "Value", });
           internal_static_hbase_pb_SnapshotRequest_descriptor =
-            getDescriptor().getMessageTypes().get(64);
+            getDescriptor().getMessageTypes().get(68);
           internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SnapshotRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_SnapshotResponse_descriptor =
-            getDescriptor().getMessageTypes().get(65);
+            getDescriptor().getMessageTypes().get(69);
           internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SnapshotResponse_descriptor,
               new java.lang.String[] { "ExpectedTimeout", });
           internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor =
-            getDescriptor().getMessageTypes().get(66);
+            getDescriptor().getMessageTypes().get(70);
           internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor =
-            getDescriptor().getMessageTypes().get(67);
+            getDescriptor().getMessageTypes().get(71);
           internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor,
               new java.lang.String[] { "Snapshots", });
           internal_static_hbase_pb_DeleteSnapshotRequest_descriptor =
-            getDescriptor().getMessageTypes().get(68);
+            getDescriptor().getMessageTypes().get(72);
           internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_DeleteSnapshotRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_DeleteSnapshotResponse_descriptor =
-            getDescriptor().getMessageTypes().get(69);
+            getDescriptor().getMessageTypes().get(73);
           internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_DeleteSnapshotResponse_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_RestoreSnapshotRequest_descriptor =
-            getDescriptor().getMessageTypes().get(70);
+            getDescriptor().getMessageTypes().get(74);
           internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RestoreSnapshotRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_RestoreSnapshotResponse_descriptor =
-            getDescriptor().getMessageTypes().get(71);
+            getDescriptor().getMessageTypes().get(75);
           internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RestoreSnapshotResponse_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor =
-            getDescriptor().getMessageTypes().get(72);
+            getDescriptor().getMessageTypes().get(76);
           internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor =
-            getDescriptor().getMessageTypes().get(73);
+            getDescriptor().getMessageTypes().get(77);
           internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor,
               new java.lang.String[] { "Done", "Snapshot", });
           internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor =
-            getDescriptor().getMessageTypes().get(74);
+            getDescriptor().getMessageTypes().get(78);
           internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor =
-            getDescriptor().getMessageTypes().get(75);
+            getDescriptor().getMessageTypes().get(79);
           internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor,
               new java.lang.String[] { "Done", });
           internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor =
-            getDescriptor().getMessageTypes().get(76);
+            getDescriptor().getMessageTypes().get(80);
           internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor,
               new java.lang.String[] { "TableName", });
           internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor =
-            getDescriptor().getMessageTypes().get(77);
+            getDescriptor().getMessageTypes().get(81);
           internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor,
               new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", });
           internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor =
-            getDescriptor().getMessageTypes().get(78);
+            getDescriptor().getMessageTypes().get(82);
           internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor,
               new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", });
           internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor =
-            getDescriptor().getMessageTypes().get(79);
+            getDescriptor().getMessageTypes().get(83);
           internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor,
               new java.lang.String[] { "TableSchema", });
           internal_static_hbase_pb_GetTableNamesRequest_descriptor =
-            getDescriptor().getMessageTypes().get(80);
+            getDescriptor().getMessageTypes().get(84);
           internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableNamesRequest_descriptor,
               new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", });
           internal_static_hbase_pb_GetTableNamesResponse_descriptor =
-            getDescriptor().getMessageTypes().get(81);
+            getDescriptor().getMessageTypes().get(85);
           internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableNamesResponse_descriptor,
               new java.lang.String[] { "TableNames", });
           internal_static_hbase_pb_GetTableStateRequest_descriptor =
-            getDescriptor().getMessageTypes().get(82);
+            getDescriptor().getMessageTypes().get(86);
           internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableStateRequest_descriptor,
               new java.lang.String[] { "TableName", });
           internal_static_hbase_pb_GetTableStateResponse_descriptor =
-            getDescriptor().getMessageTypes().get(83);
+            getDescriptor().getMessageTypes().get(87);
           internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableStateResponse_descriptor,
               new java.lang.String[] { "TableState", });
           internal_static_hbase_pb_GetClusterStatusRequest_descriptor =
-            getDescriptor().getMessageTypes().get(84);
+            getDescriptor().getMessageTypes().get(88);
           internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetClusterStatusRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_GetClusterStatusResponse_descriptor =
-            getDescriptor().getMessageTypes().get(85);
+            getDescriptor().getMessageTypes().get(89);
           internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetClusterStatusResponse_descriptor,
               new java.lang.String[] { "ClusterStatus", });
           internal_static_hbase_pb_IsMasterRunningRequest_descriptor =
-            getDescriptor().getMessageTypes().get(86);
+            getDescriptor().getMessageTypes().get(90);
           internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsMasterRunningRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsMasterRunningResponse_descriptor =
-            getDescriptor().getMessageTypes().get(87);
+            getDescriptor().getMessageTypes().get(91);
           internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsMasterRunningResponse_descriptor,
               new java.lang.String[] { "IsMasterRunning", });
           internal_static_hbase_pb_ExecProcedureRequest_descriptor =
-            getDescriptor().getMessageTypes().get(88);
+            getDescriptor().getMessageTypes().get(92);
           internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ExecProcedureRequest_descriptor,
               new java.lang.String[] { "Procedure", });
           internal_static_hbase_pb_ExecProcedureResponse_descriptor =
-            getDescriptor().getMessageTypes().get(89);
+            getDescriptor().getMessageTypes().get(93);
           internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ExecProcedureResponse_descriptor,
               new java.lang.String[] { "ExpectedTimeout", "ReturnData", });
           internal_static_hbase_pb_IsProcedureDoneRequest_descriptor =
-            getDescriptor().getMessageTypes().get(90);
+            getDescriptor().getMessageTypes().get(94);
           internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsProcedureDoneRequest_descriptor,
               new java.lang.String[] { "Procedure", });
           internal_static_hbase_pb_IsProcedureDoneResponse_descriptor =
-            getDescriptor().getMessageTypes().get(91);
+            getDescriptor().getMessageTypes().get(95);
           internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsProcedureDoneResponse_descriptor,
               new java.lang.String[] { "Done", "Snapshot", });
           internal_static_hbase_pb_GetProcedureResultRequest_descriptor =
-            getDescriptor().getMessageTypes().get(92);
+            getDescriptor().getMessageTypes().get(96);
           internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetProcedureResultRequest_descriptor,
               new java.lang.String[] { "ProcId", });
           internal_static_hbase_pb_GetProcedureResultResponse_descriptor =
-            getDescriptor().getMessageTypes().get(93);
+            getDescriptor().getMessageTypes().get(97);
           internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetProcedureResultResponse_descriptor,
               new java.lang.String[] { "State", "StartTime", "LastUpdate", "Result", "Exception", });
           internal_static_hbase_pb_AbortProcedureRequest_descriptor =
-            getDescriptor().getMessageTypes().get(94);
+            getDescriptor().getMessageTypes().get(98);
           internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_AbortProcedureRequest_descriptor,
               new java.lang.String[] { "ProcId", "MayInterruptIfRunning", });
           internal_static_hbase_pb_AbortProcedureResponse_descriptor =
-            getDescriptor().getMessageTypes().get(95);
+            getDescriptor().getMessageTypes().get(99);
           internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_AbortProcedureResponse_descriptor,
               new java.lang.String[] { "IsProcedureAborted", });
           internal_static_hbase_pb_ListProceduresRequest_descriptor =
-            getDescriptor().getMessageTypes().get(96);
+            getDescriptor().getMessageTypes().get(100);
           internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ListProceduresRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_ListProceduresResponse_descriptor =
-            getDescriptor().getMessageTypes().get(97);
+            getDescriptor().getMessageTypes().get(101);
           internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ListProceduresResponse_descriptor,
               new java.lang.String[] { "Procedure", });
           internal_static_hbase_pb_SetQuotaRequest_descriptor =
-            getDescriptor().getMessageTypes().get(98);
+            getDescriptor().getMessageTypes().get(102);
           internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetQuotaRequest_descriptor,
               new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", });
           internal_static_hbase_pb_SetQuotaResponse_descriptor =
-            getDescriptor().getMessageTypes().get(99);
+            getDescriptor().getMessageTypes().get(103);
           internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetQuotaResponse_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor =
-            getDescriptor().getMessageTypes().get(100);
+            getDescriptor().getMessageTypes().get(104);
           internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor,
               new java.lang.String[] { "TableName", });
           internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor =
-            getDescriptor().getMessageTypes().get(101);
+            getDescriptor().getMessageTypes().get(105);
           internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor,
               new java.lang.String[] { "Region", });
           internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor =
-            getDescriptor().getMessageTypes().get(102);
+            getDescriptor().getMessageTypes().get(106);
           internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor,
               new java.lang.String[] { "CompactionTimestamp", });
           internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor =
-            getDescriptor().getMessageTypes().get(103);
+            getDescriptor().getMessageTypes().get(107);
           internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor =
-            getDescriptor().getMessageTypes().get(104);
+            getDescriptor().getMessageTypes().get(108);
           internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor,
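[Reviewer note, not part of the patch: a hedged usage sketch.] The descriptor and service changes above register a top-level MasterSwitchType enum (SPLIT, MERGE) and two new MasterService RPCs, SetSplitOrMergeEnabled and IsSplitOrMergeEnabled. Assuming standard protobuf 2.5 codegen for the fields visible in the descriptor strings (enabled, synchronous, switch_types, prev_value), a caller holding the generated blocking stub could exercise the new message roughly like this:

    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;

    // Sketch only: build a request that turns both splits and merges off.
    MasterProtos.SetSplitOrMergeEnabledRequest req =
        MasterProtos.SetSplitOrMergeEnabledRequest.newBuilder()
            .setEnabled(false)                                    // required bool enabled = 1
            .setSynchronous(true)                                 // optional bool synchronous = 2
            .addSwitchTypes(MasterProtos.MasterSwitchType.SPLIT)  // repeated MasterSwitchType switch_types = 3
            .addSwitchTypes(MasterProtos.MasterSwitchType.MERGE)
            .build();
    // stub.setSplitOrMergeEnabled(controller, req) returns a
    // SetSplitOrMergeEnabledResponse whose getPrevValueList() carries the
    // previous value of each requested switch (repeated bool prev_value = 1).
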
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 8dbb5ad..9805d50 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -11,13 +11,13 @@ public final class SnapshotProtos {
   public interface SnapshotFileInfoOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
-    // required .SnapshotFileInfo.Type type = 1;
+    // required .hbase.pb.SnapshotFileInfo.Type type = 1;
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     boolean hasType();
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType();
 
@@ -67,7 +67,7 @@ public final class SnapshotProtos {
         getWalNameBytes();
   }
   /**
-   * Protobuf type {@code SnapshotFileInfo}
+   * Protobuf type {@code hbase.pb.SnapshotFileInfo}
    */
   public static final class SnapshotFileInfo extends
       com.google.protobuf.GeneratedMessage
@@ -157,12 +157,12 @@ public final class SnapshotProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
     }
@@ -183,7 +183,7 @@ public final class SnapshotProtos {
     }
 
     /**
-     * Protobuf enum {@code SnapshotFileInfo.Type}
+     * Protobuf enum {@code hbase.pb.SnapshotFileInfo.Type}
      */
     public enum Type
         implements com.google.protobuf.ProtocolMessageEnum {
@@ -261,21 +261,21 @@ public final class SnapshotProtos {
         this.value = value;
       }
 
-      // @@protoc_insertion_point(enum_scope:SnapshotFileInfo.Type)
+      // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotFileInfo.Type)
     }
 
     private int bitField0_;
-    // required .SnapshotFileInfo.Type type = 1;
+    // required .hbase.pb.SnapshotFileInfo.Type type = 1;
     public static final int TYPE_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type type_;
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     public boolean hasType() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType() {
       return type_;
@@ -613,19 +613,19 @@ public final class SnapshotProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code SnapshotFileInfo}
+     * Protobuf type {@code hbase.pb.SnapshotFileInfo}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
       }
@@ -667,7 +667,7 @@ public final class SnapshotProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo getDefaultInstanceForType() {
@@ -767,22 +767,22 @@ public final class SnapshotProtos {
       }
       private int bitField0_;
 
-      // required .SnapshotFileInfo.Type type = 1;
+      // required .hbase.pb.SnapshotFileInfo.Type type = 1;
       private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE;
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public boolean hasType() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType() {
         return type_;
       }
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public Builder setType(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type value) {
         if (value == null) {
@@ -794,7 +794,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public Builder clearType() {
         bitField0_ = (bitField0_ & ~0x00000001);
@@ -1025,7 +1025,7 @@ public final class SnapshotProtos {
         return this;
       }
 
-      // @@protoc_insertion_point(builder_scope:SnapshotFileInfo)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotFileInfo)
     }
 
     static {
@@ -1033,7 +1033,7 @@ public final class SnapshotProtos {
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:SnapshotFileInfo)
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotFileInfo)
   }
 
   public interface SnapshotRegionManifestOrBuilder
@@ -1049,47 +1049,47 @@ public final class SnapshotProtos {
      */
     int getVersion();
 
-    // required .RegionInfo region_info = 2;
+    // required .hbase.pb.RegionInfo region_info = 2;
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     boolean hasRegionInfo();
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
 
-    // repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+    // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> 
        getFamilyFilesList();
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getFamilyFiles(int index);
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     int getFamilyFilesCount();
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> 
        getFamilyFilesOrBuilderList();
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder getFamilyFilesOrBuilder(
         int index);
   }
   /**
-   * Protobuf type {@code SnapshotRegionManifest}
+   * Protobuf type {@code hbase.pb.SnapshotRegionManifest}
    */
   public static final class SnapshotRegionManifest extends
       com.google.protobuf.GeneratedMessage
@@ -1182,12 +1182,12 @@ public final class SnapshotProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder.class);
     }
@@ -1225,17 +1225,17 @@ public final class SnapshotProtos {
       com.google.protobuf.ByteString
           getNameBytes();
 
-      // optional .Reference reference = 2;
+      // optional .hbase.pb.Reference reference = 2;
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       boolean hasReference();
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getReference();
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder();
 
@@ -1258,7 +1258,7 @@ public final class SnapshotProtos {
       long getFileSize();
     }
     /**
-     * Protobuf type {@code SnapshotRegionManifest.StoreFile}
+     * Protobuf type {@code hbase.pb.SnapshotRegionManifest.StoreFile}
      */
     public static final class StoreFile extends
         com.google.protobuf.GeneratedMessage
@@ -1345,12 +1345,12 @@ public final class SnapshotProtos {
       }
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder.class);
       }
@@ -1414,23 +1414,23 @@ public final class SnapshotProtos {
         }
       }
 
-      // optional .Reference reference = 2;
+      // optional .hbase.pb.Reference reference = 2;
       public static final int REFERENCE_FIELD_NUMBER = 2;
       private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference reference_;
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       public boolean hasReference() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getReference() {
         return reference_;
       }
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder() {
         return reference_;
@@ -1652,19 +1652,19 @@ public final class SnapshotProtos {
         return builder;
       }
       /**
-       * Protobuf type {@code SnapshotRegionManifest.StoreFile}
+       * Protobuf type {@code hbase.pb.SnapshotRegionManifest.StoreFile}
        */
       public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
          implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder {
         public static final com.google.protobuf.Descriptors.Descriptor
             getDescriptor() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
         }
 
         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
             internalGetFieldAccessorTable() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable
               .ensureFieldAccessorsInitialized(
                   org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder.class);
         }
@@ -1709,7 +1709,7 @@ public final class SnapshotProtos {
 
         public com.google.protobuf.Descriptors.Descriptor
             getDescriptorForType() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
         }
 
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getDefaultInstanceForType() {
@@ -1882,18 +1882,18 @@ public final class SnapshotProtos {
           return this;
         }
 
-        // optional .Reference reference = 2;
+        // optional .hbase.pb.Reference reference = 2;
         private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference reference_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance();
         private com.google.protobuf.SingleFieldBuilder<
             org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder> referenceBuilder_;
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public boolean hasReference() {
           return ((bitField0_ & 0x00000002) == 0x00000002);
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getReference() {
           if (referenceBuilder_ == null) {
@@ -1903,7 +1903,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder setReference(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference value) {
           if (referenceBuilder_ == null) {
@@ -1919,7 +1919,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder setReference(
             org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder builderForValue) {
@@ -1933,7 +1933,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder mergeReference(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference value) {
           if (referenceBuilder_ == null) {
@@ -1952,7 +1952,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder clearReference() {
           if (referenceBuilder_ == null) {
@@ -1965,7 +1965,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder getReferenceBuilder() {
           bitField0_ |= 0x00000002;
@@ -1973,7 +1973,7 @@ public final class SnapshotProtos {
           return getReferenceFieldBuilder().getBuilder();
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder() {
           if (referenceBuilder_ != null) {
@@ -1983,7 +1983,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         private com.google.protobuf.SingleFieldBuilder<
             org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder> 
@@ -2048,7 +2048,7 @@ public final class SnapshotProtos {
           return this;
         }
 
-        // @@protoc_insertion_point(builder_scope:SnapshotRegionManifest.StoreFile)
+        // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest.StoreFile)
       }
 
       static {
@@ -2056,7 +2056,7 @@ public final class SnapshotProtos {
         defaultInstance.initFields();
       }
 
-      // @@protoc_insertion_point(class_scope:SnapshotRegionManifest.StoreFile)
+      // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.StoreFile)
     }
 
     public interface FamilyFilesOrBuilder
@@ -2072,33 +2072,33 @@ public final class SnapshotProtos {
        */
       com.google.protobuf.ByteString getFamilyName();
 
-      // repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+      // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> 
          getStoreFilesList();
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getStoreFiles(int index);
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       int getStoreFilesCount();
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
          getStoreFilesOrBuilderList();
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder getStoreFilesOrBuilder(
           int index);
     }
     /**
-     * Protobuf type {@code SnapshotRegionManifest.FamilyFiles}
+     * Protobuf type {@code hbase.pb.SnapshotRegionManifest.FamilyFiles}
      */
     public static final class FamilyFiles extends
         com.google.protobuf.GeneratedMessage
@@ -2178,12 +2178,12 @@ public final class SnapshotProtos {
       }
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder.class);
       }
@@ -2220,36 +2220,36 @@ public final class SnapshotProtos {
         return familyName_;
       }
 
-      // repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+      // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
       public static final int STORE_FILES_FIELD_NUMBER = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> storeFiles_;
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> getStoreFilesList() {
         return storeFiles_;
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
          getStoreFilesOrBuilderList() {
         return storeFiles_;
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       public int getStoreFilesCount() {
         return storeFiles_.size();
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getStoreFiles(int index) {
         return storeFiles_.get(index);
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder getStoreFilesOrBuilder(
           int index) {
@@ -2428,19 +2428,19 @@ public final class SnapshotProtos {
         return builder;
       }
       /**
-       * Protobuf type {@code SnapshotRegionManifest.FamilyFiles}
+       * Protobuf type {@code hbase.pb.SnapshotRegionManifest.FamilyFiles}
        */
       public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
          implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder {
         public static final com.google.protobuf.Descriptors.Descriptor
             getDescriptor() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
         }
 
         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
             internalGetFieldAccessorTable() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
               .ensureFieldAccessorsInitialized(
                   org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder.class);
         }
@@ -2483,7 +2483,7 @@ public final class SnapshotProtos {
 
         public com.google.protobuf.Descriptors.Descriptor
             getDescriptorForType() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
         }
 
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getDefaultInstanceForType() {
@@ -2633,7 +2633,7 @@ public final class SnapshotProtos {
           return this;
         }
 
-        // repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+        // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> storeFiles_ =
           java.util.Collections.emptyList();
         private void ensureStoreFilesIsMutable() {
@@ -2647,7 +2647,7 @@ public final class SnapshotProtos {
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> storeFilesBuilder_;
 
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> getStoreFilesList() {
           if (storeFilesBuilder_ == null) {
@@ -2657,7 +2657,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public int getStoreFilesCount() {
           if (storeFilesBuilder_ == null) {
@@ -2667,7 +2667,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getStoreFiles(int index) {
           if (storeFilesBuilder_ == null) {
@@ -2677,7 +2677,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder setStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile value) {
@@ -2694,7 +2694,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder setStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder builderForValue) {
@@ -2708,7 +2708,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile value) {
           if (storeFilesBuilder_ == null) {
@@ -2724,7 +2724,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile value) {
@@ -2741,7 +2741,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder builderForValue) {
@@ -2755,7 +2755,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder builderForValue) {
@@ -2769,7 +2769,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addAllStoreFiles(
            java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> values) {
@@ -2783,7 +2783,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder clearStoreFiles() {
           if (storeFilesBuilder_ == null) {
@@ -2796,7 +2796,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder removeStoreFiles(int index) {
           if (storeFilesBuilder_ == null) {
@@ -2809,14 +2809,14 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder getStoreFilesBuilder(
             int index) {
           return getStoreFilesFieldBuilder().getBuilder(index);
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder getStoreFilesOrBuilder(
             int index) {
@@ -2826,7 +2826,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
        public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
             getStoreFilesOrBuilderList() {
@@ -2837,14 +2837,14 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder addStoreFilesBuilder() {
           return getStoreFilesFieldBuilder().addBuilder(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.getDefaultInstance());
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder addStoreFilesBuilder(
             int index) {
@@ -2852,7 +2852,7 @@ public final class SnapshotProtos {
               index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.getDefaultInstance());
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder> 
             getStoreFilesBuilderList() {
@@ -2873,7 +2873,7 @@ public final class SnapshotProtos {
           return storeFilesBuilder_;
         }
 
-        // @@protoc_insertion_point(builder_scope:SnapshotRegionManifest.FamilyFiles)
+        // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles)
       }
 
       static {
@@ -2881,7 +2881,7 @@ public final class SnapshotProtos {
         defaultInstance.initFields();
       }
 
-      // @@protoc_insertion_point(class_scope:SnapshotRegionManifest.FamilyFiles)
+      // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles)
     }
 
     private int bitField0_;
@@ -2901,58 +2901,58 @@ public final class SnapshotProtos {
       return version_;
     }
 
-    // required .RegionInfo region_info = 2;
+    // required .hbase.pb.RegionInfo region_info = 2;
     public static final int REGION_INFO_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     public boolean hasRegionInfo() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
       return regionInfo_;
     }
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
       return regionInfo_;
     }
 
-    // repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+    // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
     public static final int FAMILY_FILES_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> familyFiles_;
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> getFamilyFilesList() {
       return familyFiles_;
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> 
        getFamilyFilesOrBuilderList() {
       return familyFiles_;
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     public int getFamilyFilesCount() {
       return familyFiles_.size();
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getFamilyFiles(int index) {
       return familyFiles_.get(index);
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder getFamilyFilesOrBuilder(
         int index) {
@@ -3152,19 +3152,19 @@ public final class SnapshotProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code SnapshotRegionManifest}
+     * Protobuf type {@code hbase.pb.SnapshotRegionManifest}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder.class);
       }
@@ -3214,7 +3214,7 @@ public final class SnapshotProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getDefaultInstanceForType() {
@@ -3376,18 +3376,18 @@ public final class SnapshotProtos {
         return this;
       }
 
-      // required .RegionInfo region_info = 2;
+      // required .hbase.pb.RegionInfo region_info = 2;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public boolean hasRegionInfo() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
         if (regionInfoBuilder_ == null) {
@@ -3397,7 +3397,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
         if (regionInfoBuilder_ == null) {
@@ -3413,7 +3413,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder setRegionInfo(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
@@ -3427,7 +3427,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
         if (regionInfoBuilder_ == null) {
@@ -3446,7 +3446,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder clearRegionInfo() {
         if (regionInfoBuilder_ == null) {
@@ -3459,7 +3459,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
         bitField0_ |= 0x00000002;
@@ -3467,7 +3467,7 @@ public final class SnapshotProtos {
         return getRegionInfoFieldBuilder().getBuilder();
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
         if (regionInfoBuilder_ != null) {
@@ -3477,7 +3477,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
@@ -3493,7 +3493,7 @@ public final class SnapshotProtos {
         return regionInfoBuilder_;
       }
 
-      // repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+      // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> familyFiles_ =
         java.util.Collections.emptyList();
       private void ensureFamilyFilesIsMutable() {
@@ -3507,7 +3507,7 @@ public final class SnapshotProtos {
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> familyFilesBuilder_;
 
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> getFamilyFilesList() {
         if (familyFilesBuilder_ == null) {
@@ -3517,7 +3517,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public int getFamilyFilesCount() {
         if (familyFilesBuilder_ == null) {
@@ -3527,7 +3527,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getFamilyFiles(int index) {
         if (familyFilesBuilder_ == null) {
@@ -3537,7 +3537,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder setFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles value) {
@@ -3554,7 +3554,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder setFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder builderForValue) {
@@ -3568,7 +3568,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles value) {
         if (familyFilesBuilder_ == null) {
@@ -3584,7 +3584,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles value) {
@@ -3601,7 +3601,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder builderForValue) {
@@ -3615,7 +3615,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder builderForValue) {
@@ -3629,7 +3629,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addAllFamilyFiles(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> values) {
@@ -3643,7 +3643,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder clearFamilyFiles() {
         if (familyFilesBuilder_ == null) {
@@ -3656,7 +3656,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder removeFamilyFiles(int index) {
         if (familyFilesBuilder_ == null) {
@@ -3669,14 +3669,14 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder getFamilyFilesBuilder(
           int index) {
         return getFamilyFilesFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder getFamilyFilesOrBuilder(
           int index) {
@@ -3686,7 +3686,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> 
           getFamilyFilesOrBuilderList() {
@@ -3697,14 +3697,14 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder addFamilyFilesBuilder() {
         return getFamilyFilesFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder addFamilyFilesBuilder(
           int index) {
@@ -3712,7 +3712,7 @@ public final class SnapshotProtos {
             index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder> 
           getFamilyFilesBuilderList() {
@@ -3733,7 +3733,7 @@ public final class SnapshotProtos {
         return familyFilesBuilder_;
       }
 
-      // @@protoc_insertion_point(builder_scope:SnapshotRegionManifest)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest)
     }
 
     static {
@@ -3741,53 +3741,53 @@ public final class SnapshotProtos {
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:SnapshotRegionManifest)
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest)
   }
 
   public interface SnapshotDataManifestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
-    // required .TableSchema table_schema = 1;
+    // required .hbase.pb.TableSchema table_schema = 1;
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     boolean hasTableSchema();
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema();
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder();
 
-    // repeated .SnapshotRegionManifest region_manifests = 2;
+    // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> 
        getRegionManifestsList();
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getRegionManifests(int index);
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     int getRegionManifestsCount();
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> 
        getRegionManifestsOrBuilderList();
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder getRegionManifestsOrBuilder(
         int index);
   }
   /**
-   * Protobuf type {@code SnapshotDataManifest}
+   * Protobuf type {@code hbase.pb.SnapshotDataManifest}
    */
   public static final class SnapshotDataManifest extends
       com.google.protobuf.GeneratedMessage
@@ -3875,12 +3875,12 @@ public final class SnapshotProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.Builder.class);
     }
@@ -3901,58 +3901,58 @@ public final class SnapshotProtos {
     }
 
     private int bitField0_;
-    // required .TableSchema table_schema = 1;
+    // required .hbase.pb.TableSchema table_schema = 1;
     public static final int TABLE_SCHEMA_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_;
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     public boolean hasTableSchema() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
       return tableSchema_;
     }
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
       return tableSchema_;
     }
 
-    // repeated .SnapshotRegionManifest region_manifests = 2;
+    // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
     public static final int REGION_MANIFESTS_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> regionManifests_;
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> getRegionManifestsList() {
       return regionManifests_;
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> 
        getRegionManifestsOrBuilderList() {
       return regionManifests_;
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     public int getRegionManifestsCount() {
       return regionManifests_.size();
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getRegionManifests(int index) {
       return regionManifests_.get(index);
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder getRegionManifestsOrBuilder(
         int index) {
@@ -4135,19 +4135,19 @@ public final class SnapshotProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code SnapshotDataManifest}
+     * Protobuf type {@code hbase.pb.SnapshotDataManifest}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.Builder.class);
       }
@@ -4195,7 +4195,7 @@ public final class SnapshotProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor;
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest getDefaultInstanceForType() {
@@ -4317,18 +4317,18 @@ public final class SnapshotProtos {
       }
       private int bitField0_;
 
-      // required .TableSchema table_schema = 1;
+      // required .hbase.pb.TableSchema table_schema = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public boolean hasTableSchema() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
         if (tableSchemaBuilder_ == null) {
@@ -4338,7 +4338,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
         if (tableSchemaBuilder_ == null) {
@@ -4354,7 +4354,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder setTableSchema(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
@@ -4368,7 +4368,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
         if (tableSchemaBuilder_ == null) {
@@ -4387,7 +4387,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder clearTableSchema() {
         if (tableSchemaBuilder_ == null) {
@@ -4400,7 +4400,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() {
         bitField0_ |= 0x00000001;
@@ -4408,7 +4408,7 @@ public final class SnapshotProtos {
         return getTableSchemaFieldBuilder().getBuilder();
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
         if (tableSchemaBuilder_ != null) {
@@ -4418,7 +4418,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> 
@@ -4434,7 +4434,7 @@ public final class SnapshotProtos {
         return tableSchemaBuilder_;
       }
 
-      // repeated .SnapshotRegionManifest region_manifests = 2;
+      // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> regionManifests_ =
         java.util.Collections.emptyList();
       private void ensureRegionManifestsIsMutable() {
@@ -4448,7 +4448,7 @@ public final class SnapshotProtos {
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> regionManifestsBuilder_;
 
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> getRegionManifestsList() {
         if (regionManifestsBuilder_ == null) {
@@ -4458,7 +4458,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public int getRegionManifestsCount() {
         if (regionManifestsBuilder_ == null) {
@@ -4468,7 +4468,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getRegionManifests(int index) {
         if (regionManifestsBuilder_ == null) {
@@ -4478,7 +4478,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder setRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest value) {
@@ -4495,7 +4495,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder setRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder builderForValue) {
@@ -4509,7 +4509,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest value) {
         if (regionManifestsBuilder_ == null) {
@@ -4525,7 +4525,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest value) {
@@ -4542,7 +4542,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder builderForValue) {
@@ -4556,7 +4556,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder builderForValue) {
@@ -4570,7 +4570,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addAllRegionManifests(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> values) {
@@ -4584,7 +4584,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder clearRegionManifests() {
         if (regionManifestsBuilder_ == null) {
@@ -4597,7 +4597,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder removeRegionManifests(int index) {
         if (regionManifestsBuilder_ == null) {
@@ -4610,14 +4610,14 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder getRegionManifestsBuilder(
           int index) {
         return getRegionManifestsFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder getRegionManifestsOrBuilder(
           int index) {
@@ -4627,7 +4627,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder>
            getRegionManifestsOrBuilderList() {
@@ -4638,14 +4638,14 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder addRegionManifestsBuilder() {
         return getRegionManifestsFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder addRegionManifestsBuilder(
           int index) {
@@ -4653,7 +4653,7 @@ public final class SnapshotProtos {
             index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder>
            getRegionManifestsBuilderList() {
@@ -4674,7 +4674,7 @@ public final class SnapshotProtos {
         return regionManifestsBuilder_;
       }
 
-      // @@protoc_insertion_point(builder_scope:SnapshotDataManifest)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDataManifest)
     }
 
     static {
@@ -4682,34 +4682,34 @@ public final class SnapshotProtos {
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:SnapshotDataManifest)
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDataManifest)
   }
 
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotFileInfo_descriptor;
+    internal_static_hbase_pb_SnapshotFileInfo_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotFileInfo_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotRegionManifest_descriptor;
+    internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotRegionManifest_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+    internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+    internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotDataManifest_descriptor;
+    internal_static_hbase_pb_SnapshotDataManifest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotDataManifest_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -4719,58 +4719,60 @@ public final class SnapshotProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\016Snapshot.proto\032\010FS.proto\032\013HBase.proto\"" +
-      "\211\001\n\020SnapshotFileInfo\022$\n\004type\030\001 \002(\0162\026.Sna" +
-      "pshotFileInfo.Type\022\r\n\005hfile\030\003 \001(\t\022\022\n\nwal" +
-      "_server\030\004 \001(\t\022\020\n\010wal_name\030\005 \001(\t\"\032\n\004Type\022" +
-      "\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\257\002\n\026SnapshotRegionMa" +
-      "nifest\022\017\n\007version\030\001 \001(\005\022 \n\013region_info\030\002" +
-      " \002(\0132\013.RegionInfo\0229\n\014family_files\030\003 \003(\0132" +
-      "#.SnapshotRegionManifest.FamilyFiles\032K\n\t" +
-      "StoreFile\022\014\n\004name\030\001 \002(\t\022\035\n\treference\030\002 \001" +
-      "(\0132\n.Reference\022\021\n\tfile_size\030\003 \001(\004\032Z\n\013Fam",
-      "ilyFiles\022\023\n\013family_name\030\001 \002(\014\0226\n\013store_f" +
-      "iles\030\002 \003(\0132!.SnapshotRegionManifest.Stor" +
-      "eFile\"m\n\024SnapshotDataManifest\022\"\n\014table_s" +
-      "chema\030\001 \002(\0132\014.TableSchema\0221\n\020region_mani" +
-      "fests\030\002 \003(\0132\027.SnapshotRegionManifestBD\n*" +
-      "org.apache.hadoop.hbase.protobuf.generat" +
-      "edB\016SnapshotProtosH\001\210\001\001\240\001\001"
+      "\n\016Snapshot.proto\022\010hbase.pb\032\010FS.proto\032\013HB" +
+      "ase.proto\"\222\001\n\020SnapshotFileInfo\022-\n\004type\030\001" +
+      " \002(\0162\037.hbase.pb.SnapshotFileInfo.Type\022\r\n" +
+      "\005hfile\030\003 \001(\t\022\022\n\nwal_server\030\004 \001(\t\022\020\n\010wal_" +
+      "name\030\005 \001(\t\"\032\n\004Type\022\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\323" +
+      "\002\n\026SnapshotRegionManifest\022\017\n\007version\030\001 \001" +
+      "(\005\022)\n\013region_info\030\002 \002(\0132\024.hbase.pb.Regio" +
+      "nInfo\022B\n\014family_files\030\003 \003(\0132,.hbase.pb.S" +
+      "napshotRegionManifest.FamilyFiles\032T\n\tSto" +
+      "reFile\022\014\n\004name\030\001 \002(\t\022&\n\treference\030\002 \001(\0132",
+      "\023.hbase.pb.Reference\022\021\n\tfile_size\030\003 \001(\004\032" +
+      "c\n\013FamilyFiles\022\023\n\013family_name\030\001 \002(\014\022?\n\013s" +
+      "tore_files\030\002 \003(\0132*.hbase.pb.SnapshotRegi" +
+      "onManifest.StoreFile\"\177\n\024SnapshotDataMani" +
+      "fest\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Ta" +
+      "bleSchema\022:\n\020region_manifests\030\002 \003(\0132 .hb" +
+      "ase.pb.SnapshotRegionManifestBD\n*org.apa" +
+      "che.hadoop.hbase.protobuf.generatedB\016Sna" +
+      "pshotProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
         public com.google.protobuf.ExtensionRegistry assignDescriptors(
             com.google.protobuf.Descriptors.FileDescriptor root) {
           descriptor = root;
-          internal_static_SnapshotFileInfo_descriptor =
+          internal_static_hbase_pb_SnapshotFileInfo_descriptor =
             getDescriptor().getMessageTypes().get(0);
-          internal_static_SnapshotFileInfo_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotFileInfo_descriptor,
+              internal_static_hbase_pb_SnapshotFileInfo_descriptor,
               new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", });
-          internal_static_SnapshotRegionManifest_descriptor =
+          internal_static_hbase_pb_SnapshotRegionManifest_descriptor =
             getDescriptor().getMessageTypes().get(1);
-          internal_static_SnapshotRegionManifest_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotRegionManifest_descriptor,
+              internal_static_hbase_pb_SnapshotRegionManifest_descriptor,
               new java.lang.String[] { "Version", "RegionInfo", "FamilyFiles", });
-          internal_static_SnapshotRegionManifest_StoreFile_descriptor =
-            internal_static_SnapshotRegionManifest_descriptor.getNestedTypes().get(0);
-          internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor =
+            internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(0);
+          internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotRegionManifest_StoreFile_descriptor,
+              internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor,
               new java.lang.String[] { "Name", "Reference", "FileSize", });
-          internal_static_SnapshotRegionManifest_FamilyFiles_descriptor =
-            internal_static_SnapshotRegionManifest_descriptor.getNestedTypes().get(1);
-          internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor =
+            internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(1);
+          internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotRegionManifest_FamilyFiles_descriptor,
+              internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor,
               new java.lang.String[] { "FamilyName", "StoreFiles", });
-          internal_static_SnapshotDataManifest_descriptor =
+          internal_static_hbase_pb_SnapshotDataManifest_descriptor =
             getDescriptor().getMessageTypes().get(2);
-          internal_static_SnapshotDataManifest_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotDataManifest_descriptor,
+              internal_static_hbase_pb_SnapshotDataManifest_descriptor,
               new java.lang.String[] { "TableSchema", "RegionManifests", });
           return null;
         }
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
index 4371739..0240a67 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
@@ -8196,6 +8196,450 @@ public final class ZooKeeperProtos {
     // @@protoc_insertion_point(class_scope:hbase.pb.TableLock)
   }
 
+  public interface SwitchStateOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional bool enabled = 1;
+    /**
+     * optional bool enabled = 1;
+     */
+    boolean hasEnabled();
+    /**
+     * optional bool enabled = 1;
+     */
+    boolean getEnabled();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.SwitchState}
+   *
+   * <pre>
+   **
+   * State of the switch.
+   * </pre>
+   */
+  public static final class SwitchState extends
+      com.google.protobuf.GeneratedMessage
+      implements SwitchStateOrBuilder {
+    // Use SwitchState.newBuilder() to construct.
+    private SwitchState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SwitchState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SwitchState defaultInstance;
+    public static SwitchState getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SwitchState getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SwitchState(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              enabled_ = input.readBool();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SwitchState> PARSER =
+        new com.google.protobuf.AbstractParser<SwitchState>() {
+      public SwitchState parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SwitchState(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SwitchState> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional bool enabled = 1;
+    public static final int ENABLED_FIELD_NUMBER = 1;
+    private boolean enabled_;
+    /**
+     * optional bool enabled = 1;
+     */
+    public boolean hasEnabled() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * optional bool enabled = 1;
+     */
+    public boolean getEnabled() {
+      return enabled_;
+    }
+
+    private void initFields() {
+      enabled_ = false;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBool(1, enabled_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(1, enabled_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState) obj;
+
+      boolean result = true;
+      result = result && (hasEnabled() == other.hasEnabled());
+      if (hasEnabled()) {
+        result = result && (getEnabled()
+            == other.getEnabled());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasEnabled()) {
+        hash = (37 * hash) + ENABLED_FIELD_NUMBER;
+        hash = (53 * hash) + hashBoolean(getEnabled());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.SwitchState}
+     *
+     * <pre>
+     **
+     * State of the switch.
+     * </pre>
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchStateOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        enabled_ = false;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState build() {
+        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.enabled_ = enabled_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.getDefaultInstance()) return this;
+        if (other.hasEnabled()) {
+          setEnabled(other.getEnabled());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional bool enabled = 1;
+      private boolean enabled_ ;
+      /**
+       * optional bool enabled = 1;
+       */
+      public boolean hasEnabled() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * optional bool enabled = 1;
+       */
+      public boolean getEnabled() {
+        return enabled_;
+      }
+      /**
+       * optional bool enabled = 1;
+       */
+      public Builder setEnabled(boolean value) {
+        bitField0_ |= 0x00000001;
+        enabled_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional bool enabled = 1;
+       */
+      public Builder clearEnabled() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        enabled_ = false;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SwitchState)
+    }
+
+    static {
+      defaultInstance = new SwitchState(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.SwitchState)
+  }
+
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_hbase_pb_MetaRegionServer_descriptor;
   private static
@@ -8246,6 +8690,11 @@ public final class ZooKeeperProtos {
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_hbase_pb_TableLock_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SwitchState_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_SwitchState_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -8286,9 +8735,10 @@ public final class ZooKeeperProtos {
       "\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.pb.TableNam" +
       "e\022(\n\nlock_owner\030\002 \001(\0132\024.hbase.pb.ServerN",
       "ame\022\021\n\tthread_id\030\003 \001(\003\022\021\n\tis_shared\030\004 \001(" +
-      "\010\022\017\n\007purpose\030\005 \001(\t\022\023\n\013create_time\030\006 \001(\003B" +
-      "E\n*org.apache.hadoop.hbase.protobuf.gene" +
-      "ratedB\017ZooKeeperProtosH\001\210\001\001\240\001\001"
+      "\010\022\017\n\007purpose\030\005 \001(\t\022\023\n\013create_time\030\006 \001(\003\"" +
+      "\036\n\013SwitchState\022\017\n\007enabled\030\001 \001(\010BE\n*org.a" +
+      "pache.hadoop.hbase.protobuf.generatedB\017Z" +
+      "ooKeeperProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -8355,6 +8805,12 @@ public final class ZooKeeperProtos {
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_TableLock_descriptor,
               new java.lang.String[] { "TableName", "LockOwner", "ThreadId", "IsShared", "Purpose", "CreateTime", });
+          internal_static_hbase_pb_SwitchState_descriptor =
+            getDescriptor().getMessageTypes().get(10);
+          internal_static_hbase_pb_SwitchState_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_SwitchState_descriptor,
+              new java.lang.String[] { "Enabled", });
           return null;
         }
       };
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index aa31a5e..79bb862 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -279,6 +279,29 @@ message IsBalancerEnabledResponse {
   required bool enabled = 1;
 }
 
+enum MasterSwitchType {
+  SPLIT = 0;
+  MERGE = 1;
+}
+
+message SetSplitOrMergeEnabledRequest {
+  required bool enabled = 1;
+  optional bool synchronous = 2;
+  repeated MasterSwitchType switch_types = 3;
+}
+
+message SetSplitOrMergeEnabledResponse {
+  repeated bool prev_value = 1;
+}
+
+message IsSplitOrMergeEnabledRequest {
+  required MasterSwitchType switch_type = 1;
+}
+
+message IsSplitOrMergeEnabledResponse {
+  required bool enabled = 1;
+}
+
 message NormalizeRequest {
 }
 
@@ -633,6 +656,19 @@ service MasterService {
     returns(IsBalancerEnabledResponse);
 
   /**
+   * Turn the split or merge switch on or off.
+   * If synchronous is true, the call waits for any outstanding split or merge operation to finish before returning.
+   */
+  rpc SetSplitOrMergeEnabled(SetSplitOrMergeEnabledRequest)
+    returns(SetSplitOrMergeEnabledResponse);
+
+  /**
+   * Query whether the split or merge switch is on/off.
+   */
+  rpc IsSplitOrMergeEnabled(IsSplitOrMergeEnabledRequest)
+    returns(IsSplitOrMergeEnabledResponse);
+
+  /**
    * Run region normalizer. Can NOT run for various reasons. Check logs.
    */
   rpc Normalize(NormalizeRequest)
diff --git a/hbase-protocol/src/main/protobuf/ZooKeeper.proto b/hbase-protocol/src/main/protobuf/ZooKeeper.proto
index 54652af..4963c09 100644
--- a/hbase-protocol/src/main/protobuf/ZooKeeper.proto
+++ b/hbase-protocol/src/main/protobuf/ZooKeeper.proto
@@ -153,3 +153,10 @@ message TableLock {
   optional string purpose = 5;
   optional int64 create_time = 6;
 }
+
+/**
+ * State of the switch.
+ */
+message SwitchState {
+  optional bool enabled = 1;
+}
\ No newline at end of file
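For context on the new wire messages defined above, here is a minimal sketch of how a
client could populate a SetSplitOrMergeEnabledRequest with the generated builders. The
class name is hypothetical and the real client path goes through Admin/RequestConverter,
so treat this purely as an illustration of the proto shapes:

    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;

    public class SwitchRequestSketch {
      public static SetSplitOrMergeEnabledRequest buildDisableBoth() {
        // Turn both switches off; synchronous=false means the master does not
        // wait for in-flight splits/merges before answering.
        return SetSplitOrMergeEnabledRequest.newBuilder()
            .setEnabled(false)
            .setSynchronous(false)
            .addSwitchTypes(MasterSwitchType.SPLIT)
            .addSwitchTypes(MasterSwitchType.MERGE)
            .build();
      }
    }

The previous settings come back in SetSplitOrMergeEnabledResponse.prev_value, one entry
per switch type in the request, which is what backs the boolean[] returned to callers.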
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
index 7639004..7ce98e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.RegionStateListener;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.TableState;
@@ -2311,6 +2312,10 @@ public class AssignmentManager {
       return hri.getShortNameToLog() + " is not opening on " + serverName;
     }
 
+    if (!((HMaster)server).getSplitOrMergeTracker().isSplitOrMergeEnabled(Admin.MasterSwitchType.SPLIT)) {
+      return "split switch is off!";
+    }
+
     // Just return in case of retrying
     if (current.isSplitting()) {
       return null;
@@ -2469,6 +2474,9 @@ public class AssignmentManager {
       return "Merging daughter region already exists, p=" + current;
     }
 
+    if (!((HMaster)server).getSplitOrMergeTracker().isSplitOrMergeEnabled(Admin.MasterSwitchType.MERGE)) {
+      return "merge switch is off!";
+    }
     // Just return in case of retrying
     if (current != null) {
       return null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 3cf750e..7549038 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -76,6 +76,7 @@ import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.TableState;
@@ -155,6 +156,7 @@ import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.RegionNormalizerTracker;
 import org.apache.hadoop.hbase.zookeeper.RegionServerTracker;
+import org.apache.hadoop.hbase.zookeeper.SplitOrMergeTrackerManager;
 import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@@ -253,6 +255,9 @@ public class HMaster extends HRegionServer implements MasterServices {
   // Tracker for load balancer state
   LoadBalancerTracker loadBalancerTracker;
 
+  // Tracker for split and merge state
+  SplitOrMergeTrackerManager splitOrMergeTracker;
+
   // Tracker for region normalizer state
   private RegionNormalizerTracker regionNormalizerTracker;
 
@@ -578,8 +583,13 @@ public class HMaster extends HRegionServer implements MasterServices {
     this.normalizer.setMasterServices(this);
     this.loadBalancerTracker = new LoadBalancerTracker(zooKeeper, this);
     this.loadBalancerTracker.start();
+
     this.regionNormalizerTracker = new RegionNormalizerTracker(zooKeeper, this);
     this.regionNormalizerTracker.start();
+
+    this.splitOrMergeTracker = new SplitOrMergeTrackerManager(zooKeeper, conf, this);
+    this.splitOrMergeTracker.start();
+
     this.assignmentManager = new AssignmentManager(this, serverManager,
       this.balancer, this.service, this.metricsMaster,
       this.tableLockManager, tableStateManager);
@@ -2779,6 +2789,20 @@ public class HMaster extends HRegionServer implements MasterServices {
     return null == regionNormalizerTracker? false: regionNormalizerTracker.isNormalizerOn();
   }
 
+
+  /**
+   * Queries the state of the {@link SplitOrMergeTrackerManager}. If the tracker is not
+   * initialized, or if the switch type is unknown, false is returned.
+   * @param switchType see {@link Admin.MasterSwitchType}
+   * @return The state of the switch
+   */
+  public boolean isSplitOrMergeEnabled(Admin.MasterSwitchType switchType) {
+    if (null == splitOrMergeTracker) {
+      return false;
+    }
+    return splitOrMergeTracker.isSplitOrMergeEnabled(switchType);
+  }
+
   /**
    * Fetch the configured {@link LoadBalancer} class name. If none is set, a default is returned.
    *
@@ -2795,4 +2819,8 @@ public class HMaster extends HRegionServer implements MasterServices {
   public RegionNormalizerTracker getRegionNormalizerTracker() {
     return regionNormalizerTracker;
   }
+
+  public SplitOrMergeTrackerManager getSplitOrMergeTracker() {
+    return splitOrMergeTracker;
+  }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 1dd4c14..1c770d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -1506,6 +1507,35 @@ public class MasterRpcServices extends RSRpcServices
   }
 
   @Override
+  public SetSplitOrMergeEnabledResponse setSplitOrMergeEnabled(RpcController controller,
+    SetSplitOrMergeEnabledRequest request) throws ServiceException {
+    SetSplitOrMergeEnabledResponse.Builder response = SetSplitOrMergeEnabledResponse.newBuilder();
+    try {
+      master.checkInitialized();
+      for (MasterSwitchType masterSwitchType : request.getSwitchTypesList()) {
+        Admin.MasterSwitchType switchType = convert(masterSwitchType);
+        boolean oldValue = master.isSplitOrMergeEnabled(switchType);
+        boolean newValue = request.getEnabled();
+        master.getSplitOrMergeTracker().setSplitOrMergeEnabled(newValue, switchType);
+        response.addPrevValue(oldValue);
+      }
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    } catch (KeeperException e) {
+      throw new ServiceException(e);
+    }
+    return response.build();
+  }
+
+  @Override
+  public IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(RpcController controller,
+    IsSplitOrMergeEnabledRequest request) throws ServiceException {
+    IsSplitOrMergeEnabledResponse.Builder response = IsSplitOrMergeEnabledResponse.newBuilder();
+    response.setEnabled(master.isSplitOrMergeEnabled(convert(request.getSwitchType())));
+    return response.build();
+  }
+
+  @Override
   public NormalizeResponse normalize(RpcController controller,
       NormalizeRequest request) throws ServiceException {
     try {
@@ -1574,4 +1604,16 @@ public class MasterRpcServices extends RSRpcServices
     }
     return response.build();
   }
+
+  private Admin.MasterSwitchType convert(MasterSwitchType switchType) {
+    switch (switchType) {
+      case SPLIT:
+        return Admin.MasterSwitchType.SPLIT;
+      case MERGE:
+        return Admin.MasterSwitchType.MERGE;
+      default:
+        break;
+    }
+    return null;
+  }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index a9113ec..93287ad 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -235,6 +235,7 @@ public class HBaseFsck extends Configured implements Closeable {
   private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older
   private static boolean forceExclusive = false; // only this hbck can modify HBase
   private static boolean disableBalancer = false; // disable load balancer to keep regions stable
+  private static boolean disableSplitAndMerge = false; // disable split and merge
   private boolean fixAssignments = false; // fix assignment errors?
   private boolean fixMeta = false; // fix meta errors?
   private boolean checkHdfs = true; // load and check fs consistency?
@@ -683,6 +684,11 @@ public class HBaseFsck extends Configured implements Closeable {
     if (shouldDisableBalancer()) {
       oldBalancer = admin.setBalancerRunning(false, true);
     }
+    boolean[] oldSplitAndMerge = null;
+    if (shouldDisableSplitAndMerge()) {
+      oldSplitAndMerge = admin.setSplitOrMergeEnabled(false, false,
+        Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
+    }
 
     try {
       onlineConsistencyRepair();
@@ -694,6 +700,17 @@ public class HBaseFsck extends Configured implements Closeable {
       if (shouldDisableBalancer() && oldBalancer) {
         admin.setBalancerRunning(oldBalancer, false);
       }
+
+      if (shouldDisableSplitAndMerge()) {
+        if (oldSplitAndMerge != null) {
+          if (oldSplitAndMerge[0]) {
+            admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.SPLIT);
+          }
+          if (oldSplitAndMerge[1]) {
+            admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.MERGE);
+          }
+        }
+      }
     }
 
     if (checkRegionBoundaries) {
@@ -4184,6 +4201,13 @@ public class HBaseFsck extends Configured implements Closeable {
   }
 
   /**
+   * Disable region splits and merges.
+   */
+  public static void setDisableSplitAndMerge() {
+    disableSplitAndMerge = true;
+  }
+
+  /**
    * The balancer should be disabled if we are modifying HBase.
    * It can be disabled if you want to prevent region movement from causing
    * false positives.
@@ -4193,6 +4217,15 @@ public class HBaseFsck extends Configured implements Closeable {
   }
 
   /**
+   * Splits and merges should be disabled if we are modifying HBase.
+   * They can be disabled to prevent region splitting or merging from
+   * causing false positives.
+   */
+  public boolean shouldDisableSplitAndMerge() {
+    return fixAny || disableSplitAndMerge;
+  }
+
+  /**
    * Set summary mode.
    * Print only summary of the tables and status (OK or INCONSISTENT)
    */
@@ -4551,6 +4584,8 @@ public class HBaseFsck extends Configured implements Closeable {
         setForceExclusive();
       } else if (cmd.equals("-disableBalancer")) {
         setDisableBalancer();
+      } else if (cmd.equals("-disableSplitAndMerge")) {
+        setDisableSplitAndMerge();
       } else if (cmd.equals("-timelag")) {
        if (i == args.length - 1) {
          errors.reportError(ERROR_CODE.WRONG_USAGE, "HBaseFsck: -timelag needs a value.");
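The hbck change above is a save/disable/restore idiom around the repair run, and operators
get the same behavior from the command line via the new -disableSplitAndMerge option. A
standalone sketch of the same idiom against the new Admin API follows; the class name and
connection setup are illustrative only, not part of the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class DisableSwitchesDuringMaintenance {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          // Remember the previous values so we only re-enable what was on before.
          boolean[] prev = admin.setSplitOrMergeEnabled(false, false,
            Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
          try {
            // ... perform maintenance while regions cannot split or merge ...
          } finally {
            if (prev[0]) {
              admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.SPLIT);
            }
            if (prev[1]) {
              admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.MERGE);
            }
          }
        }
      }
    }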
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SplitOrMergeTrackerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SplitOrMergeTrackerManager.java
new file mode 100644
index 0000000..d115af3
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SplitOrMergeTrackerManager.java
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.zookeeper;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.zookeeper.KeeperException;
+
+import java.io.IOException;
+
+/**
+ * Tracks the split and merge switch states in ZK.
+ */
+@InterfaceAudience.Private
+public class SplitOrMergeTrackerManager {
+
+  private String splitZnode;
+  private String mergeZnode;
+
+  private SwitchStateTracker splitStateTracker;
+  private SwitchStateTracker mergeStateTracker;
+
+  public SplitOrMergeTrackerManager(ZooKeeperWatcher watcher, Configuration conf,
+      Abortable abortable) {
+    try {
+      if (ZKUtil.checkExists(watcher, watcher.getSwitchZNode()) < 0) {
+        ZKUtil.createAndFailSilent(watcher, watcher.getSwitchZNode());
+      }
+    } catch (KeeperException e) {
+      throw new RuntimeException(e);
+    }
+    splitZnode = ZKUtil.joinZNode(watcher.getSwitchZNode(),
+      conf.get("zookeeper.znode.switch.split", "split"));
+    mergeZnode = ZKUtil.joinZNode(watcher.getSwitchZNode(),
+      conf.get("zookeeper.znode.switch.merge", "merge"));
+    splitStateTracker = new SwitchStateTracker(watcher, splitZnode, abortable);
+    mergeStateTracker = new SwitchStateTracker(watcher, mergeZnode, abortable);
+  }
+
+  public void start() {
+    splitStateTracker.start();
+    mergeStateTracker.start();
+  }
+
+  public boolean isSplitOrMergeEnabled(Admin.MasterSwitchType switchType) {
+    switch (switchType) {
+      case SPLIT:
+        return splitStateTracker.isSwitchEnabled();
+      case MERGE:
+        return mergeStateTracker.isSwitchEnabled();
+      default:
+        break;
+    }
+    return false;
+  }
+
+  public void setSplitOrMergeEnabled(boolean enabled, Admin.MasterSwitchType switchType)
+      throws KeeperException {
+    switch (switchType) {
+      case SPLIT:
+        splitStateTracker.setSwitchEnabled(enabled);
+        break;
+      case MERGE:
+        mergeStateTracker.setSwitchEnabled(enabled);
+        break;
+      default:
+        break;
+    }
+  }
+
+  class SwitchStateTracker extends ZooKeeperNodeTracker {
+
+    public SwitchStateTracker(ZooKeeperWatcher watcher, String node, Abortable abortable) {
+      super(watcher, node, abortable);
+    }
+
+    /**
+     * Return true if the switch is on, false otherwise
+     */
+    public boolean isSwitchEnabled() {
+      byte [] upData = super.getData(false);
+      try {
+        // if data in ZK is null, use default of on.
+        return upData == null || parseFrom(upData).getEnabled();
+      } catch (DeserializationException dex) {
+        LOG.error("ZK state for the split/merge switch could not be parsed "
+          + Bytes.toStringBinary(upData));
+        // return false to be safe.
+        return false;
+      }
+    }
+
+    /**
+     * Set the switch on/off
+     * @param enabled switch enabled or not?
+     * @throws KeeperException if the ZooKeeper update fails
+     */
+    public void setSwitchEnabled(boolean enabled) throws KeeperException {
+      byte [] upData = toByteArray(enabled);
+      try {
+        ZKUtil.setData(watcher, node, upData);
+      } catch(KeeperException.NoNodeException nne) {
+        ZKUtil.createAndWatch(watcher, node, upData);
+      }
+      super.nodeDataChanged(node);
+    }
+
+    private byte [] toByteArray(boolean enabled) {
+      SwitchState.Builder builder = SwitchState.newBuilder();
+      builder.setEnabled(enabled);
+      return ProtobufUtil.prependPBMagic(builder.build().toByteArray());
+    }
+
+    private SwitchState parseFrom(byte [] bytes)
+      throws DeserializationException {
+      ProtobufUtil.expectPBMagicPrefix(bytes);
+      SwitchState.Builder builder = SwitchState.newBuilder();
+      try {
+        int magicLen = ProtobufUtil.lengthOfPBMagic();
+        ProtobufUtil.mergeFrom(builder, bytes, magicLen, bytes.length - magicLen);
+      } catch (IOException e) {
+        throw new DeserializationException(e);
+      }
+      return builder.build();
+    }
+  }
+
+}
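The znode payload written by SwitchStateTracker is the PB magic prefix followed by a
serialized SwitchState message. A small round-trip sketch of that encoding, using the same
helpers the tracker calls above (the class name is hypothetical; this is an illustration,
not part of the patch):

    import org.apache.hadoop.hbase.exceptions.DeserializationException;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState;

    public class SwitchStatePayloadSketch {
      // Encode: PB magic + serialized message, as toByteArray() does above.
      static byte[] encode(boolean enabled) {
        SwitchState state = SwitchState.newBuilder().setEnabled(enabled).build();
        return ProtobufUtil.prependPBMagic(state.toByteArray());
      }

      // Decode: verify the magic, then merge the remaining bytes, as parseFrom() does above.
      static boolean decode(byte[] data) throws DeserializationException, java.io.IOException {
        ProtobufUtil.expectPBMagicPrefix(data);
        int magicLen = ProtobufUtil.lengthOfPBMagic();
        SwitchState.Builder builder = SwitchState.newBuilder();
        ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
        return builder.build().getEnabled();
      }
    }

Note that the tracker treats a missing znode as "on", so a cluster that has never touched
the switch keeps splitting and merging as before.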
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
new file mode 100644
index 0000000..6405a14
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.JVMClusterUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+@Category({MediumTests.class, ClientTests.class})
+public class TestSplitOrMergeStatus {
+
+  private static final Log LOG = LogFactory.getLog(TestSplitOrMergeStatus.class);
+  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static byte [] FAMILY = Bytes.toBytes("testFamily");
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(2);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testSplitSwitch() throws Exception {
+    TableName name = TableName.valueOf("testSplitSwitch");
+    Table t = TEST_UTIL.createTable(name, FAMILY);
+    TEST_UTIL.loadTable(t, FAMILY, false);
+
+    RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(t.getName());
+    int originalCount = locator.getAllRegionLocations().size();
+
+    Admin admin = TEST_UTIL.getAdmin();
+    initSwitchStatus(admin);
+    boolean[] results = admin.setSplitOrMergeEnabled(false, false, Admin.MasterSwitchType.SPLIT);
+    assertEquals(results.length, 1);
+    assertTrue(results[0]);
+    admin.split(t.getName());
+    int count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount == count);
+
+    results = admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.SPLIT);
+    assertEquals(results.length, 1);
+    assertFalse(results[0]);
+    admin.split(t.getName());
+    count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount < count);
+    admin.close();
+  }
+
+  @Test
+  public void testMergeSwitch() throws Exception {
+    TableName name = TableName.valueOf("testMergeSwitch");
+    Table t = TEST_UTIL.createTable(name, FAMILY);
+    TEST_UTIL.loadTable(t, FAMILY, false);
+
+    Admin admin = TEST_UTIL.getAdmin();
+    initSwitchStatus(admin);
+    admin.split(t.getName());
+    waitOnSplitOrMerge(t); // Make sure the table has more than one region.
+    int originalCount = admin.getTableRegions(t.getName()).size();
+
+    waitForMergable(admin, name);
+    boolean[] results = admin.setSplitOrMergeEnabled(false, false, Admin.MasterSwitchType.MERGE);
+    assertEquals(results.length, 1);
+    assertTrue(results[0]);
+    List<HRegionInfo> regions = admin.getTableRegions(t.getName());
+    assertTrue(regions.size() > 1);
+    admin.mergeRegions(regions.get(0).getEncodedNameAsBytes(),
+      regions.get(1).getEncodedNameAsBytes(), true);
+    int count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount == count);
+
+    waitForMergable(admin, name);
+    results = admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.MERGE);
+    assertEquals(results.length, 1);
+    assertFalse(results[0]);
+    admin.mergeRegions(regions.get(0).getEncodedNameAsBytes(),
+      regions.get(1).getEncodedNameAsBytes(), true);
+    count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount > count);
+    admin.close();
+  }
+
+  @Test
+  public void testMultiSwitches() throws IOException {
+    Admin admin = TEST_UTIL.getAdmin();
+    boolean[] switches = admin.setSplitOrMergeEnabled(false, false,
+      Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
+    for (boolean s : switches) {
+      assertTrue(s);
+    }
+  @Test
+  public void testMultiSwitches() throws IOException {
+    Admin admin = TEST_UTIL.getAdmin();
+    boolean[] switches = admin.setSplitOrMergeEnabled(false, false,
+      Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
+    for (boolean s : switches) {
+      assertTrue(s);
+    }
+    assertFalse(admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.SPLIT));
+    assertFalse(admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.MERGE));
+    admin.close();
+  }
+
+  private void initSwitchStatus(Admin admin) throws IOException {
+    if (!admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.SPLIT)) {
+      admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.SPLIT);
+    }
+    if (!admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.MERGE)) {
+      admin.setSplitOrMergeEnabled(true, false, Admin.MasterSwitchType.MERGE);
+    }
+    assertTrue(admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.SPLIT));
+    assertTrue(admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.MERGE));
+  }
+
+  private void waitForMergable(Admin admin, TableName t) throws InterruptedException, IOException {
+    // Wait for the regions to be mergeable.
+    MiniHBaseCluster miniCluster = TEST_UTIL.getMiniHBaseCluster();
+    int mergeable = 0;
+    while (mergeable < 2) {
+      Thread.sleep(100);
+      admin.majorCompact(t);
+      mergeable = 0;
+      for (JVMClusterUtil.RegionServerThread regionThread: miniCluster.getRegionServerThreads()) {
+        for (Region region: regionThread.getRegionServer().getOnlineRegions(t)) {
+          mergeable += ((HRegion) region).isMergeable() ? 1 : 0;
+        }
+      }
+    }
+  }
+
+  /*
+   * Wait on table split. May return because we waited long enough on the split
+   * and it didn't happen. Caller should check.
+   * @param t
+   * @return List of table region locations; caller needs to check the table actually split.
+   */
+  private List<HRegionLocation> waitOnSplitOrMerge(final Table t)
+    throws IOException {
+    try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(t.getName())) {
+      List<HRegionLocation> regions = locator.getAllRegionLocations();
+      int originalCount = regions.size();
+      for (int i = 0; i < TEST_UTIL.getConfiguration().getInt("hbase.test.retries", 10); i++) {
+        try {
+          Thread.sleep(1000);
+        } catch (InterruptedException e) {
+          LOG.warn("Interrupted while waiting on split or merge", e);
+        }
+        regions = locator.getAllRegionLocations();
+        if (regions.size() != originalCount) {
+          break;
+        }
+      }
+      return regions;
+    }
+  }
+
+}
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 82f0700..28d4d46 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -132,6 +132,38 @@ module Hbase
     end
   end
 
+    #----------------------------------------------------------------------------------------------
+    # Enable/disable one split or merge switch.
+    # Returns previous switch setting.
+    def splitormerge_switch(type, enable_disable)
+      switch_type = nil
+      if type == "SPLIT"
+        switch_type = org.apache.hadoop.hbase.client.Admin::MasterSwitchType::SPLIT
+      elsif type == "MERGE"
+        switch_type = org.apache.hadoop.hbase.client.Admin::MasterSwitchType::MERGE
+      else
+        raise ArgumentError, "only SPLIT or MERGE accepted for type!"
+      end
+      @admin.setSplitOrMergeEnabled(
+        java.lang.Boolean::valueOf(enable_disable), java.lang.Boolean::valueOf(false),
+        switch_type)[0]
+    end
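+    # Usage sketch (assumed shell session, not part of the original change): the value
+    # returned above is the switch's previous state, which the splitormerge_switch shell
+    # command echoes back to the operator:
+    #
+    #   hbase> splitormerge_switch 'MERGE', false   # prints the previous state, e.g. "true"
+    #   hbase> splitormerge_enabled 'MERGE'         # now reports "false"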
+
+    #----------------------------------------------------------------------------------------------
+    # Query the current state of the split or merge switch.
+    # Returns the switch's state (true is enabled).
+    def splitormerge_enabled(type)
+      switch_type = nil
+      if type == "SPLIT"
+        switch_type = org.apache.hadoop.hbase.client.Admin::MasterSwitchType::SPLIT
+      elsif type == "MERGE"
+        switch_type = org.apache.hadoop.hbase.client.Admin::MasterSwitchType::MERGE
+      else
+        raise ArgumentError, "only SPLIT or MERGE accepted for type!"
+      end
+      @admin.isSplitOrMergeEnabled(switch_type)
+    end
+
     def locate_region(table_name, row_key)
       locator = @connection.getRegionLocator(TableName.valueOf(table_name))
       begin
diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb
index 0ecd3d7..4144b91 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -333,6 +333,8 @@ Shell.load_command_group(
     catalogjanitor_enabled
     compact_rs
     trace
+    splitormerge_switch
+    splitormerge_enabled
   ],
   # TODO remove older hlog_roll command
   :aliases => {
diff --git a/hbase-shell/src/main/ruby/shell/commands/splitormerge_enabled.rb b/hbase-shell/src/main/ruby/shell/commands/splitormerge_enabled.rb
new file mode 100644
index 0000000..59c1f0a
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/splitormerge_enabled.rb
@@ -0,0 +1,41 @@
+#!/usr/bin/env hbase-jruby
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with this
+# work for additional information regarding copyright ownership. The ASF
+# licenses this file to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Prints the current state of the split or merge switch
+
+module Shell
+  module Commands
+    class SplitormergeEnabled < Command
+      def help
+        return <<-EOF
+Query the state of the split or merge switch. You must specify the switch type, 'SPLIT' or 'MERGE'.
+Examples:
+
+  hbase> splitormerge_enabled 'SPLIT'
+EOF
+      end
+
+      def command(switch_type)
+        format_simple_command do
+          formatter.row([
+            admin.splitormerge_enabled(switch_type) ? "true" : "false"
+          ])
+        end
+      end
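+      # Usage sketch (assumed session; exact shell output may differ):
+      #
+      #   hbase> splitormerge_enabled 'SPLIT'
+      #   true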
+    end
+  end
+end
diff --git a/hbase-shell/src/main/ruby/shell/commands/splitormerge_switch.rb b/hbase-shell/src/main/ruby/shell/commands/splitormerge_switch.rb
new file mode 100644
index 0000000..f8e14f2
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/splitormerge_switch.rb
@@ -0,0 +1,42 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+module Shell
+  module Commands
+    class SplitormergeSwitch < Command
+      def help
+        return <<-EOF
+Enable/Disable one split or merge switch. You must specify the switch type, 'SPLIT' or 'MERGE'.
+Returns the previous state of the switch.
+Examples:
+
+  hbase> splitormerge_switch 'SPLIT', true
+  hbase> splitormerge_switch 'SPLIT', false
+EOF
+      end
+
+      def command(switch_type, enable_disable)
+        format_simple_command do
+          formatter.row([
+            admin.splitormerge_switch(switch_type, enable_disable) ? "true" : "false"
+          ])
+        end
+      end
+    end
+  end
+end
-- 
1.9.3 (Apple Git-50)